blob: 8fb4c79196fdc2c9c2161b62ea4fab8c5c509aeb [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Ben Murdochb8e0da22011-05-16 14:20:40 +010028#include "v8.h"
29
30#if defined(V8_TARGET_ARCH_IA32)
31
Ben Murdochb0fe1622011-05-05 13:52:32 +010032#include "ia32/lithium-codegen-ia32.h"
33#include "code-stubs.h"
Steve Block44f0eee2011-05-26 01:26:41 +010034#include "deoptimizer.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010035#include "stub-cache.h"
Ben Murdoch3ef787d2012-04-12 10:51:47 +010036#include "codegen.h"
Ben Murdochb0fe1622011-05-05 13:52:32 +010037
38namespace v8 {
39namespace internal {
40
41
// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
//
// This CallWrapper subclass is handed to the macro assembler so that
// AfterCall() fires immediately after the emitted call instruction — the
// exact pc at which the safepoint must be recorded.
class SafepointGenerator : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen),
        pointers_(pointers),
        deopt_mode_(mode) {}
  virtual ~SafepointGenerator() { }

  // Nothing to emit before the call itself.
  virtual void BeforeCall(int call_size) const {}

  // Record the safepoint right after the call instruction is emitted.
  virtual void AfterCall() const {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;                // Used to record the safepoint.
  LPointerMap* pointers_;            // Live tagged values at the call site.
  Safepoint::DeoptMode deopt_mode_;  // Whether lazy deopt applies here.
};
65
66
67#define __ masm()->
68
// Drives the whole code-generation pipeline: prologue, instruction body,
// deferred code, and safepoint table. Returns false if any stage aborted.
bool LCodeGen::GenerateCode() {
  HPhase phase("Z_Code generation", chunk());
  ASSERT(is_unused());
  status_ = GENERATING;
  // ia32 Lithium code requires SSE2 for double arithmetic.
  CpuFeatures::Scope scope(SSE2);

  CodeStub::GenerateFPStubs();

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  return GeneratePrologue() &&
      GenerateBody() &&
      GenerateDeferredCode() &&
      GenerateSafepointTable();
}
87
88
// Fills in the parts of the Code object only known after generation:
// stack slot count, safepoint table offset, and deoptimization data.
void LCodeGen::FinishCode(Handle<Code> code) {
  ASSERT(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  PopulateDeoptimizationData(code);
  // Reserve relocation space up front so lazy deopt patching cannot fail.
  Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
96
97
// Marks code generation as aborted; optionally traces the printf-style
// reason (under --trace-bailout) together with the function's debug name.
void LCodeGen::Abort(const char* format, ...) {
  if (FLAG_trace_bailout) {
    SmartArrayPointer<char> name(
        info()->shared_info()->DebugName()->ToCString());
    PrintF("Aborting LCodeGen in @\"%s\": ", *name);
    va_list arguments;
    va_start(arguments, format);
    OS::VPrint(format, arguments);
    va_end(arguments);
    PrintF("\n");
  }
  // Callers check is_aborted(); no code is emitted past this point.
  status_ = ABORTED;
}
111
112
113void LCodeGen::Comment(const char* format, ...) {
114 if (!FLAG_code_comments) return;
115 char buffer[4 * KB];
116 StringBuilder builder(buffer, ARRAY_SIZE(buffer));
117 va_list arguments;
118 va_start(arguments, format);
119 builder.AddFormattedList(format, arguments);
120 va_end(arguments);
121
122 // Copy the string before recording it in the assembler to avoid
123 // issues when the stack allocated buffer goes out of scope.
124 size_t length = builder.position();
125 Vector<char> copy = Vector<char>::New(length + 1);
126 memcpy(copy.start(), builder.Finalize(), copy.length());
127 masm()->RecordComment(copy.start());
128}
129
130
// Emits the function prologue: optional receiver patching, frame setup,
// stack-slot reservation, optional local-context allocation, and optional
// call tracing. Returns false if generation was aborted along the way.
bool LCodeGen::GeneratePrologue() {
  ASSERT(is_generating());

#ifdef DEBUG
  // Support --stop-at=<function>: break into the debugger on entry.
  if (strlen(FLAG_stop_at) > 0 &&
      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). ecx is zero for method calls and non-zero for
  // function calls.
  if (!info_->is_classic_mode() || info_->is_native()) {
    Label ok;
    __ test(ecx, Operand(ecx));
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));
    __ bind(&ok);
  }

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // Fill the reserved slots with a recognizable zap value so stale
      // reads are easy to spot in the debugger.
      __ mov(Operand(eax), Immediate(slots));
      Label loop;
      __ bind(&loop);
      __ push(Immediate(kSlotsZapValue));
      __ dec(eax);
      __ j(not_zero, &loop);
    } else {
      __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      // On windows, you may not access the stack more than one page below
      // the most recently mapped page. To make the allocated area randomly
      // accessible, we write to each page in turn (the value is irrelevant).
      const int kPageSize = 4 * KB;
      for (int offset = slots * kPointerSize - kPageSize;
           offset > 0;
           offset -= kPageSize) {
        __ mov(Operand(esp, offset), eax);
      }
#endif
    }
  }

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    // Context is returned in both eax and esi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        __ RecordWriteContextSlot(esi,
                                  context_offset,
                                  eax,
                                  ebx,
                                  kDontSaveFPRegs);
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace) {
    // We have not executed any compiled code yet, so esi still holds the
    // incoming context.
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
235
236
// Compiles every Lithium instruction in order. Labels that have been
// replaced (merged blocks) suppress emission of the instructions that
// follow them until the next live label.
bool LCodeGen::GenerateBody() {
  ASSERT(is_generating());
  bool emit_instructions = true;
  for (current_instruction_ = 0;
       !is_aborted() && current_instruction_ < instructions_->length();
       current_instruction_++) {
    LInstruction* instr = instructions_->at(current_instruction_);
    if (instr->IsLabel()) {
      LLabel* label = LLabel::cast(instr);
      // A replaced label means this block was folded into another one;
      // skip its instructions.
      emit_instructions = !label->HasReplacement();
    }

    if (emit_instructions) {
      Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
      instr->CompileToNative(this);
    }
  }
  // Guarantee room for the lazy-deopt patch at the end of the body.
  EnsureSpaceForLazyDeopt();
  return !is_aborted();
}
257
258
// Emits the out-of-line (deferred) code stubs collected during the body
// pass. Each deferred section jumps back to its exit label when done.
bool LCodeGen::GenerateDeferredCode() {
  ASSERT(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];
      __ bind(code->entry());
      Comment(";;; Deferred code @%d: %s.",
              code->instruction_index(),
              code->instr()->Mnemonic());
      code->Generate();
      __ jmp(code->exit());
    }
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}
278
279
// Emits the accumulated safepoint table after all code has been generated.
bool LCodeGen::GenerateSafepointTable() {
  ASSERT(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}
285
286
// Maps a register-allocator index to the corresponding ia32 register.
Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}
290
291
// Maps a register-allocator index to the corresponding XMM register.
XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}
295
296
// Returns the ia32 register assigned to a register operand.
Register LCodeGen::ToRegister(LOperand* op) const {
  ASSERT(op->IsRegister());
  return ToRegister(op->index());
}
301
302
// Returns the XMM register assigned to a double-register operand.
XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  ASSERT(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}
307
308
// Returns the int32 value of a constant operand. The operand must have
// Integer32 representation and its number must round-trip through int32.
int LCodeGen::ToInteger32(LConstantOperand* op) const {
  Handle<Object> value = chunk_->LookupLiteral(op);
  ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
  // The double must be exactly representable as int32 (checked in debug).
  ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
      value->Number());
  return static_cast<int32_t>(value->Number());
}
316
317
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100318Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
319 Handle<Object> literal = chunk_->LookupLiteral(op);
320 ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
321 return literal;
322}
323
324
325double LCodeGen::ToDouble(LConstantOperand* op) const {
326 Handle<Object> value = chunk_->LookupLiteral(op);
327 return value->Number();
328}
329
330
// True if the constant operand has Integer32 representation.
bool LCodeGen::IsInteger32(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsInteger32();
}
334
335
// Converts an LOperand into an assembler Operand: a register directly,
// or an ebp-relative memory operand for stack/spill slots.
Operand LCodeGen::ToOperand(LOperand* op) const {
  if (op->IsRegister()) return Operand(ToRegister(op));
  if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  int index = op->index();
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return Operand(ebp, -(index + 3) * kPointerSize);
  } else {
    // Incoming parameter. Skip the return address.
    return Operand(ebp, -(index - 1) * kPointerSize);
  }
}
350
351
Ben Murdochb8e0da22011-05-16 14:20:40 +0100352Operand LCodeGen::HighOperand(LOperand* op) {
353 ASSERT(op->IsDoubleStackSlot());
354 int index = op->index();
355 int offset = (index >= 0) ? index + 3 : index - 1;
356 return Operand(ebp, -offset * kPointerSize);
357}
358
359
// Recursively serializes an environment chain (outermost frame first)
// into a deoptimization translation: one frame header per environment
// followed by one command per environment value.
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->values()->length();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  // Emit outer frames before this one so the deoptimizer reads them in
  // stack order.
  WriteTranslation(environment->outer(), translation);
  int closure_id = DefineDeoptimizationLiteral(environment->closure());
  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
    default:
      UNREACHABLE();
  }
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    // spilled_registers_ and spilled_double_registers_ are either
    // both NULL or both set.
    if (environment->spilled_registers() != NULL && value != NULL) {
      // A register value with a spill copy is recorded twice: mark the
      // extra entry as a duplicate so the deoptimizer keeps one.
      if (value->IsRegister() &&
          environment->spilled_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(translation,
                         environment->spilled_registers()[value->index()],
                         environment->HasTaggedValueAt(i));
      } else if (
          value->IsDoubleRegister() &&
          environment->spilled_double_registers()[value->index()] != NULL) {
        translation->MarkDuplicate();
        AddToTranslation(
            translation,
            environment->spilled_double_registers()[value->index()],
            false);
      }
    }

    AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
  }
}
409
410
Ben Murdochb0fe1622011-05-05 13:52:32 +0100411void LCodeGen::AddToTranslation(Translation* translation,
412 LOperand* op,
413 bool is_tagged) {
414 if (op == NULL) {
415 // TODO(twuerthinger): Introduce marker operands to indicate that this value
416 // is not present and must be reconstructed from the deoptimizer. Currently
417 // this is only used for the arguments object.
418 translation->StoreArgumentsObject();
419 } else if (op->IsStackSlot()) {
420 if (is_tagged) {
421 translation->StoreStackSlot(op->index());
422 } else {
423 translation->StoreInt32StackSlot(op->index());
424 }
425 } else if (op->IsDoubleStackSlot()) {
426 translation->StoreDoubleStackSlot(op->index());
427 } else if (op->IsArgument()) {
428 ASSERT(is_tagged);
Ben Murdoch257744e2011-11-30 15:57:28 +0000429 int src_index = GetStackSlotCount() + op->index();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100430 translation->StoreStackSlot(src_index);
431 } else if (op->IsRegister()) {
432 Register reg = ToRegister(op);
433 if (is_tagged) {
434 translation->StoreRegister(reg);
435 } else {
436 translation->StoreInt32Register(reg);
437 }
438 } else if (op->IsDoubleRegister()) {
439 XMMRegister reg = ToDoubleRegister(op);
440 translation->StoreDoubleRegister(reg);
441 } else if (op->IsConstantOperand()) {
442 Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
443 int src_index = DefineDeoptimizationLiteral(literal);
444 translation->StoreLiteral(src_index);
445 } else {
446 UNREACHABLE();
447 }
448}
449
450
Ben Murdoch8b112d22011-06-08 16:22:53 +0100451void LCodeGen::CallCodeGeneric(Handle<Code> code,
452 RelocInfo::Mode mode,
453 LInstruction* instr,
Ben Murdoch8b112d22011-06-08 16:22:53 +0100454 SafepointMode safepoint_mode) {
Steve Block1e0659c2011-05-24 12:43:12 +0100455 ASSERT(instr != NULL);
456 LPointerMap* pointers = instr->pointer_map();
457 RecordPosition(pointers->position());
Steve Block1e0659c2011-05-24 12:43:12 +0100458 __ call(code, mode);
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000459 RecordSafepointWithLazyDeopt(instr, safepoint_mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100460
461 // Signal that we don't inline smi code before these stubs in the
462 // optimizing code generator.
Ben Murdoch257744e2011-11-30 15:57:28 +0000463 if (code->kind() == Code::BINARY_OP_IC ||
Ben Murdochb0fe1622011-05-05 13:52:32 +0100464 code->kind() == Code::COMPARE_IC) {
465 __ nop();
466 }
467}
468
469
Ben Murdoch8b112d22011-06-08 16:22:53 +0100470void LCodeGen::CallCode(Handle<Code> code,
471 RelocInfo::Mode mode,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000472 LInstruction* instr) {
473 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100474}
475
476
Steve Block44f0eee2011-05-26 01:26:41 +0100477void LCodeGen::CallRuntime(const Runtime::Function* fun,
Steve Block1e0659c2011-05-24 12:43:12 +0100478 int argc,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000479 LInstruction* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100480 ASSERT(instr != NULL);
Steve Block1e0659c2011-05-24 12:43:12 +0100481 ASSERT(instr->HasPointerMap());
Ben Murdochb0fe1622011-05-05 13:52:32 +0100482 LPointerMap* pointers = instr->pointer_map();
Ben Murdochb0fe1622011-05-05 13:52:32 +0100483 RecordPosition(pointers->position());
484
Steve Block1e0659c2011-05-24 12:43:12 +0100485 __ CallRuntime(fun, argc);
Steve Block44f0eee2011-05-26 01:26:41 +0100486
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000487 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100488}
489
490
Ben Murdoch8b112d22011-06-08 16:22:53 +0100491void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
492 int argc,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000493 LInstruction* instr,
494 LOperand* context) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000495 if (context->IsRegister()) {
496 if (!ToRegister(context).is(esi)) {
497 __ mov(esi, ToRegister(context));
498 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100499 } else if (context->IsStackSlot()) {
Ben Murdoch85b71792012-04-11 18:30:58 +0100500 __ mov(esi, ToOperand(context));
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100501 } else if (context->IsConstantOperand()) {
502 Handle<Object> literal =
503 chunk_->LookupLiteral(LConstantOperand::cast(context));
504 __ LoadHeapObject(esi, Handle<Context>::cast(literal));
505 } else {
506 UNREACHABLE();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000507 }
508
Ben Murdoch8b112d22011-06-08 16:22:53 +0100509 __ CallRuntimeSaveDoubles(id);
510 RecordSafepointWithRegisters(
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000511 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
Ben Murdoch8b112d22011-06-08 16:22:53 +0100512}
513
514
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000515void LCodeGen::RegisterEnvironmentForDeoptimization(
516 LEnvironment* environment, Safepoint::DeoptMode mode) {
Ben Murdochb0fe1622011-05-05 13:52:32 +0100517 if (!environment->HasBeenRegistered()) {
518 // Physical stack frame layout:
519 // -x ............. -4 0 ..................................... y
520 // [incoming arguments] [spill slots] [pushed outgoing arguments]
521
522 // Layout of the environment:
523 // 0 ..................................................... size-1
524 // [parameters] [locals] [expression stack including arguments]
525
526 // Layout of the translation:
527 // 0 ........................................................ size - 1 + 4
528 // [expression stack including arguments] [locals] [4 words] [parameters]
529 // |>------------ translation_size ------------<|
530
531 int frame_count = 0;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100532 int jsframe_count = 0;
Ben Murdochb0fe1622011-05-05 13:52:32 +0100533 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
534 ++frame_count;
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100535 if (e->frame_type() == JS_FUNCTION) {
536 ++jsframe_count;
537 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100538 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100539 Translation translation(&translations_, frame_count, jsframe_count);
Ben Murdochb8e0da22011-05-16 14:20:40 +0100540 WriteTranslation(environment, &translation);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100541 int deoptimization_index = deoptimizations_.length();
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000542 int pc_offset = masm()->pc_offset();
543 environment->Register(deoptimization_index,
544 translation.index(),
545 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100546 deoptimizations_.Add(environment);
547 }
548}
549
550
// Emits a (conditional) jump to the eager deoptimization entry for the
// given environment. Also implements --deopt-every-n-times (forced
// periodic deopts) and --trap-on-deopt (int3 before deopting).
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  ASSERT(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
  if (entry == NULL) {
    Abort("bailout was not prepared");
    return;
  }

  if (FLAG_deopt_every_n_times != 0) {
    Handle<SharedFunctionInfo> shared(info_->shared_info());
    Label no_deopt;
    // Preserve flags and scratch registers while decrementing the
    // per-function deopt counter stored on the SharedFunctionInfo.
    __ pushfd();
    __ push(eax);
    __ push(ebx);
    __ mov(ebx, shared);
    __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
    __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
    __ j(not_zero, &no_deopt, Label::kNear);
    // Counter hit zero: reset it and force a deopt.
    if (FLAG_trap_on_deopt) __ int3();
    __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);

    __ bind(&no_deopt);
    __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
    __ pop(ebx);
    __ pop(eax);
    __ popfd();
  }

  if (cc == no_condition) {
    // Unconditional deopt.
    if (FLAG_trap_on_deopt) __ int3();
    __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    if (FLAG_trap_on_deopt) {
      // Branch around the trap when the condition does not hold.
      Label done;
      __ j(NegateCondition(cc), &done, Label::kNear);
      __ int3();
      __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
      __ bind(&done);
    } else {
      __ j(cc, entry, RelocInfo::RUNTIME_ENTRY);
    }
  }
}
601
602
// Builds the DeoptimizationInputData array (translations, literals, OSR
// info, and one entry per registered environment) and attaches it to
// the generated code object. No-op when nothing was registered.
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      factory()->NewDeoptimizationInputData(length, TENURED);

  Handle<ByteArray> translations = translations_.CreateByteArray();
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  for (int i = 0; i < deoptimization_literals_.length(); i++) {
    literals->set(i, *deoptimization_literals_[i]);
  }
  data->SetLiteralArray(*literals);

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, Smi::FromInt(env->ast_id()));
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}
634
635
636int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
637 int result = deoptimization_literals_.length();
638 for (int i = 0; i < deoptimization_literals_.length(); ++i) {
639 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
640 }
641 deoptimization_literals_.Add(literal);
642 return result;
643}
644
645
646void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
647 ASSERT(deoptimization_literals_.length() == 0);
648
649 const ZoneList<Handle<JSFunction> >* inlined_closures =
650 chunk()->inlined_closures();
651
652 for (int i = 0, length = inlined_closures->length();
653 i < length;
654 i++) {
655 DefineDeoptimizationLiteral(inlined_closures->at(i));
656 }
657
658 inlined_function_count_ = deoptimization_literals_.length();
659}
660
661
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000662void LCodeGen::RecordSafepointWithLazyDeopt(
663 LInstruction* instr, SafepointMode safepoint_mode) {
664 if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
665 RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
666 } else {
667 ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
668 RecordSafepointWithRegisters(
669 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
670 }
671}
672
673
Steve Block1e0659c2011-05-24 12:43:12 +0100674void LCodeGen::RecordSafepoint(
675 LPointerMap* pointers,
676 Safepoint::Kind kind,
677 int arguments,
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000678 Safepoint::DeoptMode deopt_mode) {
Ben Murdoch8b112d22011-06-08 16:22:53 +0100679 ASSERT(kind == expected_safepoint_kind_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100680 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000681 Safepoint safepoint =
682 safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100683 for (int i = 0; i < operands->length(); i++) {
684 LOperand* pointer = operands->at(i);
685 if (pointer->IsStackSlot()) {
686 safepoint.DefinePointerSlot(pointer->index());
Steve Block1e0659c2011-05-24 12:43:12 +0100687 } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
688 safepoint.DefinePointerRegister(ToRegister(pointer));
Ben Murdochb0fe1622011-05-05 13:52:32 +0100689 }
690 }
691}
692
693
Steve Block1e0659c2011-05-24 12:43:12 +0100694void LCodeGen::RecordSafepoint(LPointerMap* pointers,
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000695 Safepoint::DeoptMode mode) {
696 RecordSafepoint(pointers, Safepoint::kSimple, 0, mode);
Steve Block1e0659c2011-05-24 12:43:12 +0100697}
698
699
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000700void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100701 LPointerMap empty_pointers(RelocInfo::kNoPosition);
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000702 RecordSafepoint(&empty_pointers, mode);
Ben Murdoche0cee9b2011-05-25 10:26:03 +0100703}
704
705
Ben Murdochb0fe1622011-05-05 13:52:32 +0100706void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
707 int arguments,
Ben Murdoch2b4ba112012-01-20 14:57:15 +0000708 Safepoint::DeoptMode mode) {
709 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, mode);
Ben Murdochb0fe1622011-05-05 13:52:32 +0100710}
711
712
// Records a source position with the assembler; ignores "no position".
void LCodeGen::RecordPosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
}
717
718
// Binds a basic-block label, updates the current block id, and processes
// the gap moves attached to the label.
void LCodeGen::DoLabel(LLabel* label) {
  if (label->is_loop_header()) {
    Comment(";;; B%d - LOOP entry", label->block_id());
  } else {
    Comment(";;; B%d", label->block_id());
  }
  __ bind(label->label());
  current_block_ = label->block_id();
  // LLabel is also an LGap; resolve its parallel moves now.
  DoGap(label);
}
729
730
// Emits code for one parallel move via the gap resolver.
void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}
734
735
736void LCodeGen::DoGap(LGap* gap) {
737 for (int i = LGap::FIRST_INNER_POSITION;
738 i <= LGap::LAST_INNER_POSITION;
739 i++) {
740 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
741 LParallelMove* move = gap->GetParallelMove(inner_pos);
742 if (move != NULL) DoParallelMove(move);
743 }
Ben Murdochb0fe1622011-05-05 13:52:32 +0100744}
745
746
Ben Murdoch257744e2011-11-30 15:57:28 +0000747void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
748 DoGap(instr);
749}
750
751
Ben Murdochb0fe1622011-05-05 13:52:32 +0100752void LCodeGen::DoParameter(LParameter* instr) {
753 // Nothing to do.
754}
755
756
// Dispatches an LCallStub to the matching code stub, keyed by the
// hydrogen instruction's major key. Context must be in esi and the
// result lands in eax.
void LCodeGen::DoCallStub(LCallStub* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->result()).is(eax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpConstructResult: {
      RegExpConstructResultStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::RegExpExec: {
      RegExpExecStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::NumberToString: {
      NumberToStringStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringAdd: {
      StringAddStub stub(NO_STRING_ADD_FLAGS);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub;
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::TranscendentalCache: {
      // TAGGED: the stub takes/returns tagged numbers.
      TranscendentalCacheStub stub(instr->transcendental_type(),
                                   TranscendentalCacheStub::TAGGED);
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}
801
802
// OSR values are already materialized in their expected slots when
// entering via on-stack replacement; no code is emitted.
void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  // Nothing to do.
}
806
807
// Emits integer modulus. Two strategies:
//  1. Power-of-two divisor known statically: mask with |divisor|-1,
//     negating around the mask for a negative dividend so the result keeps
//     the dividend's sign (truncated-division semantics).
//  2. General case: fast paths for zero dividend, dividend < divisor,
//     runtime power-of-two divisor, and a small subtraction loop, falling
//     back to idiv. Fixed registers: dividend in eax, remainder in edx.
// Deoptimizes on x % 0 (when the divisor can be zero) and when the result
// would be negative zero (when minus-zero bailout is requested).
void LCodeGen::DoModI(LModI* instr) {
  if (instr->hydrogen()->HasPowerOf2Divisor()) {
    Register dividend = ToRegister(instr->InputAt(0));

    int32_t divisor =
        HConstant::cast(instr->hydrogen()->right())->Integer32Value();

    // The sign of the divisor does not affect the remainder's value.
    if (divisor < 0) divisor = -divisor;

    Label positive_dividend, done;
    __ test(dividend, Operand(dividend));
    __ j(not_sign, &positive_dividend, Label::kNear);
    // Negative dividend: negate, mask, negate back so the remainder takes
    // the dividend's sign.
    __ neg(dividend);
    __ and_(dividend, divisor - 1);
    __ neg(dividend);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // A zero result from a negative dividend is -0 -> deopt.
      __ j(not_zero, &done, Label::kNear);
      DeoptimizeIf(no_condition, instr->environment());
    } else {
      __ jmp(&done, Label::kNear);
    }
    __ bind(&positive_dividend);
    __ and_(dividend, divisor - 1);
    __ bind(&done);
  } else {
    Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
    Register left_reg = ToRegister(instr->InputAt(0));
    Register right_reg = ToRegister(instr->InputAt(1));
    Register result_reg = ToRegister(instr->result());

    // idiv requires the dividend in eax and leaves the remainder in edx.
    ASSERT(left_reg.is(eax));
    ASSERT(result_reg.is(edx));
    ASSERT(!right_reg.is(eax));
    ASSERT(!right_reg.is(edx));

    // Check for x % 0.
    if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
      __ test(right_reg, Operand(right_reg));
      DeoptimizeIf(zero, instr->environment());
    }

    __ test(left_reg, Operand(left_reg));
    __ j(zero, &remainder_eq_dividend, Label::kNear);
    __ j(sign, &slow, Label::kNear);

    __ test(right_reg, Operand(right_reg));
    __ j(not_sign, &both_positive, Label::kNear);
    // The sign of the divisor doesn't matter.
    __ neg(right_reg);

    __ bind(&both_positive);
    // If the dividend is smaller than the nonnegative
    // divisor, the dividend is the result.
    __ cmp(left_reg, Operand(right_reg));
    __ j(less, &remainder_eq_dividend, Label::kNear);

    // Check if the divisor is a PowerOfTwo integer.
    Register scratch = ToRegister(instr->TempAt(0));
    __ mov(scratch, right_reg);
    __ sub(Operand(scratch), Immediate(1));
    __ test(scratch, Operand(right_reg));
    __ j(not_zero, &do_subtraction, Label::kNear);
    // Power of two: remainder is dividend & (divisor - 1).
    __ and_(left_reg, Operand(scratch));
    __ jmp(&remainder_eq_dividend, Label::kNear);

    __ bind(&do_subtraction);
    const int kUnfolds = 3;
    // Try a few subtractions of the dividend.
    __ mov(scratch, left_reg);
    for (int i = 0; i < kUnfolds; i++) {
      // Reduce the dividend by the divisor.
      __ sub(left_reg, Operand(right_reg));
      // Check if the dividend is less than the divisor.
      __ cmp(left_reg, Operand(right_reg));
      __ j(less, &remainder_eq_dividend, Label::kNear);
    }
    // Subtraction loop did not finish; restore the dividend for idiv.
    __ mov(left_reg, scratch);

    // Slow case, using idiv instruction.
    __ bind(&slow);
    // Sign extend to edx.
    __ cdq();

    // Check for (0 % -x) that will produce negative zero.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      Label positive_left;
      Label done;
      __ test(left_reg, Operand(left_reg));
      __ j(not_sign, &positive_left, Label::kNear);
      __ idiv(right_reg);

      // Test the remainder for 0, because then the result would be -0.
      __ test(result_reg, Operand(result_reg));
      __ j(not_zero, &done, Label::kNear);

      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&positive_left);
      __ idiv(right_reg);
      __ bind(&done);
    } else {
      __ idiv(right_reg);
    }
    __ jmp(&done, Label::kNear);

    __ bind(&remainder_eq_dividend);
    __ mov(result_reg, left_reg);

    __ bind(&done);
  }
}
918
919
// Emits integer division that must be exact: deoptimizes on a nonzero
// remainder, on division by zero, on a -0 result (0 / negative), and on
// kMinInt / -1 overflow — each check only when the corresponding hydrogen
// flag says it can occur. Fixed registers: dividend/quotient in eax,
// sign-extension/remainder in edx (idiv requirement).
void LCodeGen::DoDivI(LDivI* instr) {
  LOperand* right = instr->InputAt(1);
  ASSERT(ToRegister(instr->result()).is(eax));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(!ToRegister(instr->InputAt(1)).is(edx));

  Register left_reg = eax;

  // Check for x / 0.
  Register right_reg = ToRegister(right);
  if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(zero, instr->environment());
  }

  // Check for (0 / -x) that will produce negative zero.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label left_not_zero;
    __ test(left_reg, Operand(left_reg));
    __ j(not_zero, &left_not_zero, Label::kNear);
    __ test(right_reg, ToOperand(right));
    DeoptimizeIf(sign, instr->environment());
    __ bind(&left_not_zero);
  }

  // Check for (-kMinInt / -1), whose quotient overflows int32.
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    Label left_not_min_int;
    __ cmp(left_reg, kMinInt);
    __ j(not_zero, &left_not_min_int, Label::kNear);
    __ cmp(right_reg, -1);
    DeoptimizeIf(zero, instr->environment());
    __ bind(&left_not_min_int);
  }

  // Sign extend to edx.
  __ cdq();
  __ idiv(right_reg);

  // Deoptimize if remainder is not 0.
  __ test(edx, Operand(edx));
  DeoptimizeIf(not_zero, instr->environment());
}
964
965
// Emits integer multiplication. Constant right operands get strength
// reductions (neg/xor/add/lea/shl) where safe; otherwise imul is used.
// Deoptimizes on overflow and, if requested, on a -0 result (zero result
// with a negative other operand). TempAt(0) saves the original left value
// for the minus-zero sign test, since the multiply clobbers |left|.
void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->InputAt(0));
  LOperand* right = instr->InputAt(1);

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Preserve the pre-multiply left operand for the sign check below.
    __ mov(ToRegister(instr->TempAt(0)), left);
  }

  if (right->IsConstantOperand()) {
    // Try strength reductions on the multiplication.
    // All replacement instructions are at most as long as the imul
    // and have better latency.
    int constant = ToInteger32(LConstantOperand::cast(right));
    if (constant == -1) {
      __ neg(left);
    } else if (constant == 0) {
      __ xor_(left, Operand(left));
    } else if (constant == 2) {
      __ add(left, Operand(left));
    } else if (!instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
      // If we know that the multiplication can't overflow, it's safe to
      // use instructions that don't set the overflow flag for the
      // multiplication.
      switch (constant) {
        case 1:
          // Do nothing.
          break;
        case 3:
          __ lea(left, Operand(left, left, times_2, 0));
          break;
        case 4:
          __ shl(left, 2);
          break;
        case 5:
          __ lea(left, Operand(left, left, times_4, 0));
          break;
        case 8:
          __ shl(left, 3);
          break;
        case 9:
          __ lea(left, Operand(left, left, times_8, 0));
          break;
        case 16:
          __ shl(left, 4);
          break;
        default:
          __ imul(left, left, constant);
          break;
      }
    } else {
      __ imul(left, left, constant);
    }
  } else {
    __ imul(left, ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }

  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    __ test(left, Operand(left));
    __ j(not_zero, &done, Label::kNear);
    if (right->IsConstantOperand()) {
      if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
        DeoptimizeIf(no_condition, instr->environment());
      }
    } else {
      // Test the non-zero operand for negative sign.
      __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
      DeoptimizeIf(sign, instr->environment());
    }
    __ bind(&done);
  }
}
1043
1044
// Emits a bitwise AND/OR/XOR. The left operand doubles as the result
// (two-address form); the right operand may be an immediate constant or a
// register/stack operand.
void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());

  if (right->IsConstantOperand()) {
    int right_operand = ToInteger32(LConstantOperand::cast(right));
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), right_operand);
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), right_operand);
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), right_operand);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    switch (instr->op()) {
      case Token::BIT_AND:
        __ and_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_OR:
        __ or_(ToRegister(left), ToOperand(right));
        break;
      case Token::BIT_XOR:
        __ xor_(ToRegister(left), ToOperand(right));
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1084
1085
// Emits SAR/SHR/SHL. A register shift amount must live in ecx (ia32 cl
// requirement); a constant amount is masked to 5 bits. SHR can deoptimize
// when the result would have the sign bit set (it would not fit in a
// signed int32).
void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));
  ASSERT(left->IsRegister());
  if (right->IsRegister()) {
    ASSERT(ToRegister(right).is(ecx));

    switch (instr->op()) {
      case Token::SAR:
        __ sar_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shr_cl(ToRegister(left));
        if (instr->can_deopt()) {
          // Logical shift result with the top bit set cannot be
          // represented as a signed int32 -> deopt.
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        }
        break;
      case Token::SHL:
        __ shl_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int value = ToInteger32(LConstantOperand::cast(right));
    // ia32 shifts use only the low 5 bits of the count.
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::SAR:
        if (shift_count != 0) {
          __ sar(ToRegister(left), shift_count);
        }
        break;
      case Token::SHR:
        if (shift_count == 0 && instr->can_deopt()) {
          // x >>> 0 of a negative x is not an int32 -> deopt.
          __ test(ToRegister(left), Immediate(0x80000000));
          DeoptimizeIf(not_zero, instr->environment());
        } else {
          __ shr(ToRegister(left), shift_count);
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          __ shl(ToRegister(left), shift_count);
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
1140
1141
// Emits integer subtraction (left is also the result) and deoptimizes on
// overflow when the hydrogen instruction says overflow is possible.
void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ sub(ToOperand(left), ToInteger32Immediate(right));
  } else {
    __ sub(ToRegister(left), ToOperand(right));
  }
  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}
1156
1157
// Materializes an int32 constant into the result register.
void LCodeGen::DoConstantI(LConstantI* instr) {
  ASSERT(instr->result()->IsRegister());
  __ Set(ToRegister(instr->result()), Immediate(instr->value()));
}
1162
1163
// Materializes a double constant into an XMM register. +0.0 is produced
// with a single xorps; other values are assembled from their 32-bit
// halves through a GP temp, using pinsrd when SSE4.1 is available and a
// movd/psllq/por sequence otherwise.
void LCodeGen::DoConstantD(LConstantD* instr) {
  ASSERT(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  // Use xor to produce +0.0 in a fast and compact way, but avoid to
  // do so if the constant is -0.0 (its bit pattern is nonzero).
  if (BitCast<uint64_t, double>(v) == 0) {
    __ xorps(res, res);
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    uint64_t int_val = BitCast<uint64_t, double>(v);
    int32_t lower = static_cast<int32_t>(int_val);
    int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatures::Scope scope(SSE4_1);
      if (lower != 0) {
        __ Set(temp, Immediate(lower));
        __ movd(res, Operand(temp));
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      } else {
        // Lower half is zero: clear the register, then insert the upper.
        __ xorps(res, res);
        __ Set(temp, Immediate(upper));
        __ pinsrd(res, Operand(temp), 1);
      }
    } else {
      // No SSE4.1: build the value as (upper << 32) | lower via xmm0.
      __ Set(temp, Immediate(upper));
      __ movd(res, Operand(temp));
      __ psllq(res, 32);
      if (lower != 0) {
        __ Set(temp, Immediate(lower));
        __ movd(xmm0, Operand(temp));
        __ por(res, xmm0);
      }
    }
  }
}
1201
1202
1203void LCodeGen::DoConstantT(LConstantT* instr) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001204 Register reg = ToRegister(instr->result());
1205 Handle<Object> handle = instr->value();
1206 if (handle->IsHeapObject()) {
1207 __ LoadHeapObject(reg, Handle<HeapObject>::cast(handle));
1208 } else {
1209 __ Set(reg, Immediate(handle));
1210 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001211}
1212
1213
// Loads the length field of a JSArray into the result register.
void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001219
Ben Murdochb0fe1622011-05-05 13:52:32 +01001220
// Loads the length field of a FixedArrayBase into the result register.
void LCodeGen::DoFixedArrayBaseLength(
    LFixedArrayBaseLength* instr) {
  Register result = ToRegister(instr->result());
  Register array = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
}
1227
1228
// Extracts the elements kind of an object from its map's bit field 2 into
// the result register.
void LCodeGen::DoElementsKind(LElementsKind* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));

  // Load map into |result|.
  __ mov(result, FieldOperand(input, HeapObject::kMapOffset));
  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(result, FieldOperand(result, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ and_(result, Map::kElementsKindMask);
  __ shr(result, Map::kElementsKindShift);
}
1242
1243
// Implements the "value of" operation: for a JSValue wrapper, loads the
// wrapped value; any other input (including smis) passes through
// unchanged. Input and result share a register.
void LCodeGen::DoValueOf(LValueOf* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->TempAt(0));
  ASSERT(input.is(result));

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(input, &done, Label::kNear);

  // If the object is not a value type, return the object.
  __ CmpObjectType(input, JS_VALUE_TYPE, map);
  __ j(not_equal, &done, Label::kNear);
  __ mov(result, FieldOperand(input, JSValue::kValueOffset));

  __ bind(&done);
}
1261
1262
// Loads a field of a JSDate object. Field 0 (the time value) is always
// read directly. Cached fields are read inline when the date-cache stamp
// still matches the object's stamp; otherwise (and for uncached fields)
// the value is fetched via a C call. object/result are fixed to eax —
// presumably matching the C calling convention used here; confirm against
// the LDateField definition.
void LCodeGen::DoDateField(LDateField* instr) {
  Register object = ToRegister(instr->InputAt(0));
  Register result = ToRegister(instr->result());
  Register scratch = ToRegister(instr->TempAt(0));
  Smi* index = instr->index();
  Label runtime, done;
  ASSERT(object.is(result));
  ASSERT(object.is(eax));

#ifdef DEBUG
  // Debug-only type check: the input must be a JSDate.
  __ AbortIfSmi(object);
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
  __ Assert(equal, "Trying to get date field from non-date.");
#endif

  if (index->value() == 0) {
    __ mov(result, FieldOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      // Fast path: cache is still valid for this object.
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch, Operand::StaticVariable(stamp));
      __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                          kPointerSize * index->value()));
      __ jmp(&done);
    }
    // Slow path: compute the field in C++.
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }
}
1298
1299
// Emits bitwise NOT in place (input register is also the result).
void LCodeGen::DoBitNotI(LBitNotI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->Equals(instr->result()));
  __ not_(ToRegister(input));
}
1305
1306
// Throws the given value via Runtime::kThrow (context in esi). The runtime
// call does not return; the int3 after it only documents that in debug
// builds.
void LCodeGen::DoThrow(LThrow* instr) {
  __ push(ToOperand(instr->value()));
  ASSERT(ToRegister(instr->context()).is(esi));
  CallRuntime(Runtime::kThrow, 1, instr);

  if (FLAG_debug_code) {
    Comment("Unreachable code.");
    __ int3();
  }
}
1317
1318
// Emits integer addition (left is also the result) and deoptimizes on
// overflow when the hydrogen instruction says overflow is possible.
void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->InputAt(0);
  LOperand* right = instr->InputAt(1);
  ASSERT(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    __ add(ToOperand(left), ToInteger32Immediate(right));
  } else {
    __ add(ToRegister(left), ToOperand(right));
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr->environment());
  }
}
1334
1335
// Emits a double-precision arithmetic operation on XMM registers.
// ADD/SUB/MUL/DIV operate in place on |left| (which is the result);
// MOD calls out to a C function with both operands on the stack and
// transfers the x87 st(0) return value into the fixed XMM result via
// memory.
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->InputAt(0));
  XMMRegister right = ToDoubleRegister(instr->InputAt(1));
  XMMRegister result = ToDoubleRegister(instr->result());
  // Modulo uses a fixed result register.
  ASSERT(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(left, right);
      break;
    case Token::SUB:
      __ subsd(left, right);
      break;
    case Token::MUL:
      __ mulsd(left, right);
      break;
    case Token::DIV:
      __ divsd(left, right);
      break;
    case Token::MOD: {
      // Pass two doubles as arguments on the stack.
      __ PrepareCallCFunction(4, eax);
      __ movdbl(Operand(esp, 0 * kDoubleSize), left);
      __ movdbl(Operand(esp, 1 * kDoubleSize), right);
      __ CallCFunction(
          ExternalReference::double_fp_operation(Token::MOD, isolate()),
          4);

      // Return value is in st(0) on ia32.
      // Store it into the (fixed) result register.
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ fstp_d(Operand(esp, 0));
      __ movdbl(result, Operand(esp, 0));
      __ add(Operand(esp), Immediate(kDoubleSize));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
1377
1378
// Emits a generic (tagged-operand) binary arithmetic operation through the
// BinaryOpStub. Fixed registers: context in esi, left in edx, right in
// eax, result in eax.
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->left()).is(edx));
  ASSERT(ToRegister(instr->right()).is(eax));
  ASSERT(ToRegister(instr->result()).is(eax));

  BinaryOpStub stub(instr->op(), NO_OVERWRITE);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ nop();  // Signals no inlined code.
}
1389
1390
1391int LCodeGen::GetNextEmittedBlock(int block) {
1392 for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1393 LLabel* label = chunk_->GetLabel(i);
1394 if (!label->HasReplacement()) return i;
1395 }
1396 return -1;
1397}
1398
1399
// Emits a conditional branch to |left_block| on |cc| and to |right_block|
// otherwise, after resolving both through LookupDestination. Avoids
// redundant jumps: identical targets become a single goto, and whichever
// target is the next emitted block falls through (the condition is negated
// when the true target falls through).
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
  int next_block = GetNextEmittedBlock(current_block_);
  right_block = chunk_->LookupDestination(right_block);
  left_block = chunk_->LookupDestination(left_block);

  if (right_block == left_block) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    // True target falls through; jump to the false target on !cc.
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    // False target falls through; jump to the true target on cc.
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    __ jmp(chunk_->GetAssemblyLabel(right_block));
  }
}
1416
1417
// Emits a branch on the truth value of an arbitrary value. Int32 values
// branch on non-zero; doubles branch on != +0.0 (ucomisd against zero);
// tagged values with known Boolean/Smi type get a direct compare/test.
// Otherwise a ToBoolean-style type cascade is emitted, restricted to the
// types recorded in expected_input_types(); seeing an unexpected type
// deoptimizes so new type feedback can be collected.
void LCodeGen::DoBranch(LBranch* instr) {
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    Register reg = ToRegister(instr->InputAt(0));
    __ test(reg, Operand(reg));
    EmitBranch(true_block, false_block, not_zero);
  } else if (r.IsDouble()) {
    XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
    __ xorps(xmm0, xmm0);
    __ ucomisd(reg, xmm0);
    EmitBranch(true_block, false_block, not_equal);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->InputAt(0));
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      __ cmp(reg, factory()->true_value());
      EmitBranch(true_block, false_block, equal);
    } else if (type.IsSmi()) {
      // A smi is truthy iff it is non-zero.
      __ test(reg, Operand(reg));
      EmitBranch(true_block, false_block, not_equal);
    } else {
      Label* true_label = chunk_->GetAssemblyLabel(true_block);
      Label* false_label = chunk_->GetAssemblyLabel(false_block);

      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::all_types();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ cmp(reg, factory()->undefined_value());
        __ j(equal, false_label);
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // true -> true.
        __ cmp(reg, factory()->true_value());
        __ j(equal, true_label);
        // false -> false.
        __ cmp(reg, factory()->false_value());
        __ j(equal, false_label);
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ cmp(reg, factory()->null_value());
        __ j(equal, false_label);
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ test(reg, Operand(reg));
        __ j(equal, false_label);
        __ JumpIfSmi(reg, true_label);
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ test(reg, Immediate(kSmiTagMask));
        DeoptimizeIf(zero, instr->environment());
      }

      Register map = no_reg;  // Keep the compiler happy.
      if (expected.NeedsMap()) {
        map = ToRegister(instr->TempAt(0));
        ASSERT(!map.is(reg));
        __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));

        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ test_b(FieldOperand(map, Map::kBitFieldOffset),
                    1 << Map::kIsUndetectable);
          __ j(not_zero, false_label);
        }
      }

      if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
        // spec object -> true.
        __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
        __ j(above_equal, true_label);
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
        __ j(above_equal, &not_string, Label::kNear);
        __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
        __ j(not_zero, true_label);
        __ jmp(false_label);
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // heap number -> false iff +0, -0, or NaN.
        Label not_heap_number;
        __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
               factory()->heap_number_map());
        __ j(not_equal, &not_heap_number, Label::kNear);
        __ fldz();
        __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
        __ FCmp();
        __ j(zero, false_label);
        __ jmp(true_label);
        __ bind(&not_heap_number);
      }

      // We've seen something for the first time -> deopt.
      DeoptimizeIf(no_condition, instr->environment());
    }
  }
}
1530
1531
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001532void LCodeGen::EmitGoto(int block) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001533 block = chunk_->LookupDestination(block);
1534 int next_block = GetNextEmittedBlock(current_block_);
1535 if (block != next_block) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001536 __ jmp(chunk_->GetAssemblyLabel(block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001537 }
1538}
1539
1540
// Emits an unconditional transfer to the goto's target block.
void LCodeGen::DoGoto(LGoto* instr) {
  EmitGoto(instr->block_id());
}
1544
1545
1546Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1547 Condition cond = no_condition;
1548 switch (op) {
1549 case Token::EQ:
1550 case Token::EQ_STRICT:
1551 cond = equal;
1552 break;
1553 case Token::LT:
1554 cond = is_unsigned ? below : less;
1555 break;
1556 case Token::GT:
1557 cond = is_unsigned ? above : greater;
1558 break;
1559 case Token::LTE:
1560 cond = is_unsigned ? below_equal : less_equal;
1561 break;
1562 case Token::GTE:
1563 cond = is_unsigned ? above_equal : greater_equal;
1564 break;
1565 case Token::IN:
1566 case Token::INSTANCEOF:
1567 default:
1568 UNREACHABLE();
1569 }
1570 return cond;
1571}
1572
1573
Ben Murdochb0fe1622011-05-05 13:52:32 +01001574void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001575 LOperand* left = instr->InputAt(0);
1576 LOperand* right = instr->InputAt(1);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001577 int false_block = chunk_->LookupDestination(instr->false_block_id());
1578 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch85b71792012-04-11 18:30:58 +01001579 Condition cc = TokenToCondition(instr->op(), instr->is_double());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001580
1581 if (left->IsConstantOperand() && right->IsConstantOperand()) {
1582 // We can statically evaluate the comparison.
1583 double left_val = ToDouble(LConstantOperand::cast(left));
1584 double right_val = ToDouble(LConstantOperand::cast(right));
1585 int next_block =
1586 EvalComparison(instr->op(), left_val, right_val) ? true_block
1587 : false_block;
1588 EmitGoto(next_block);
1589 } else {
1590 if (instr->is_double()) {
1591 // Don't base result on EFLAGS when a NaN is involved. Instead
1592 // jump to the false block.
1593 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1594 __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
1595 } else {
1596 if (right->IsConstantOperand()) {
1597 __ cmp(ToRegister(left), ToInteger32Immediate(right));
1598 } else if (left->IsConstantOperand()) {
1599 __ cmp(ToOperand(right), ToInteger32Immediate(left));
1600 // We transposed the operands. Reverse the condition.
1601 cc = ReverseCondition(cc);
1602 } else {
1603 __ cmp(ToRegister(left), ToOperand(right));
1604 }
1605 }
1606 EmitBranch(true_block, false_block, cc);
1607 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001608}
1609
1610
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001611void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001612 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001613 Operand right = ToOperand(instr->InputAt(1));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001614 int false_block = chunk_->LookupDestination(instr->false_block_id());
1615 int true_block = chunk_->LookupDestination(instr->true_block_id());
1616
1617 __ cmp(left, Operand(right));
1618 EmitBranch(true_block, false_block, equal);
1619}
1620
1621
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001622void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001623 Register left = ToRegister(instr->InputAt(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00001624 int true_block = chunk_->LookupDestination(instr->true_block_id());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001625 int false_block = chunk_->LookupDestination(instr->false_block_id());
Ben Murdoch257744e2011-11-30 15:57:28 +00001626
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001627 __ cmp(left, instr->hydrogen()->right());
Ben Murdoch257744e2011-11-30 15:57:28 +00001628 EmitBranch(true_block, false_block, equal);
1629}
1630
1631
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001632void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001633 Register reg = ToRegister(instr->InputAt(0));
Ben Murdoch85b71792012-04-11 18:30:58 +01001634 int false_block = chunk_->LookupDestination(instr->false_block_id());
1635
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001636 // If the expression is known to be untagged or a smi, then it's definitely
1637 // not null, and it can't be a an undetectable object.
1638 if (instr->hydrogen()->representation().IsSpecialization() ||
1639 instr->hydrogen()->type().IsSmi()) {
1640 EmitGoto(false_block);
1641 return;
1642 }
1643
1644 int true_block = chunk_->LookupDestination(instr->true_block_id());
1645 Handle<Object> nil_value = instr->nil() == kNullValue ?
1646 factory()->null_value() :
1647 factory()->undefined_value();
1648 __ cmp(reg, nil_value);
1649 if (instr->kind() == kStrictEquality) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001650 EmitBranch(true_block, false_block, equal);
1651 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001652 Handle<Object> other_nil_value = instr->nil() == kNullValue ?
1653 factory()->undefined_value() :
1654 factory()->null_value();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001655 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1656 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1657 __ j(equal, true_label);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001658 __ cmp(reg, other_nil_value);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001659 __ j(equal, true_label);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001660 __ JumpIfSmi(reg, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001661 // Check for undetectable objects by looking in the bit field in
1662 // the map. The object has already been smi checked.
Ben Murdochb8e0da22011-05-16 14:20:40 +01001663 Register scratch = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001664 __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1665 __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1666 __ test(scratch, Immediate(1 << Map::kIsUndetectable));
1667 EmitBranch(true_block, false_block, not_zero);
1668 }
1669}
1670
1671
// Emits the "is object" test. Smis, undetectable objects and values
// below the non-callable spec-object range jump to |is_not_object|;
// null jumps directly to |is_object|. Otherwise control falls through
// and the returned condition holds iff |input| is an object.
// Clobbers |temp1| but not |input|.
Condition LCodeGen::EmitIsObject(Register input,
                                 Register temp1,
                                 Label* is_not_object,
                                 Label* is_object) {
  __ JumpIfSmi(input, is_not_object);

  // null counts as an object for this test.
  __ cmp(input, isolate()->factory()->null_value());
  __ j(equal, is_object);

  __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined.
  __ test_b(FieldOperand(temp1, Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  __ j(not_zero, is_not_object);

  // Range check on the instance type: the answer is "object" iff it
  // lies within [FIRST, LAST]_NONCALLABLE_SPEC_OBJECT_TYPE.
  __ movzx_b(temp1, FieldOperand(temp1, Map::kInstanceTypeOffset));
  __ cmp(temp1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, is_not_object);
  __ cmp(temp1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  return below_equal;
}
1693
1694
Ben Murdochb0fe1622011-05-05 13:52:32 +01001695void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001696 Register reg = ToRegister(instr->InputAt(0));
1697 Register temp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001698
1699 int true_block = chunk_->LookupDestination(instr->true_block_id());
1700 int false_block = chunk_->LookupDestination(instr->false_block_id());
1701 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1702 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1703
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001704 Condition true_cond = EmitIsObject(reg, temp, false_label, true_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001705
1706 EmitBranch(true_block, false_block, true_cond);
1707}
1708
1709
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001710Condition LCodeGen::EmitIsString(Register input,
1711 Register temp1,
1712 Label* is_not_string) {
1713 __ JumpIfSmi(input, is_not_string);
1714
1715 Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
1716
1717 return cond;
1718}
1719
1720
1721void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
1722 Register reg = ToRegister(instr->InputAt(0));
1723 Register temp = ToRegister(instr->TempAt(0));
1724
1725 int true_block = chunk_->LookupDestination(instr->true_block_id());
1726 int false_block = chunk_->LookupDestination(instr->false_block_id());
1727 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1728
1729 Condition true_cond = EmitIsString(reg, temp, false_label);
1730
1731 EmitBranch(true_block, false_block, true_cond);
1732}
1733
1734
Ben Murdochb0fe1622011-05-05 13:52:32 +01001735void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001736 Operand input = ToOperand(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001737
1738 int true_block = chunk_->LookupDestination(instr->true_block_id());
1739 int false_block = chunk_->LookupDestination(instr->false_block_id());
1740
1741 __ test(input, Immediate(kSmiTagMask));
1742 EmitBranch(true_block, false_block, zero);
1743}
1744
1745
Ben Murdoch257744e2011-11-30 15:57:28 +00001746void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
1747 Register input = ToRegister(instr->InputAt(0));
1748 Register temp = ToRegister(instr->TempAt(0));
1749
1750 int true_block = chunk_->LookupDestination(instr->true_block_id());
1751 int false_block = chunk_->LookupDestination(instr->false_block_id());
1752
1753 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001754 __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block));
Ben Murdoch257744e2011-11-30 15:57:28 +00001755 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
1756 __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
1757 1 << Map::kIsUndetectable);
1758 EmitBranch(true_block, false_block, not_zero);
1759}
1760
1761
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001762static Condition ComputeCompareCondition(Token::Value op) {
1763 switch (op) {
1764 case Token::EQ_STRICT:
1765 case Token::EQ:
1766 return equal;
1767 case Token::LT:
1768 return less;
1769 case Token::GT:
1770 return greater;
1771 case Token::LTE:
1772 return less_equal;
1773 case Token::GTE:
1774 return greater_equal;
1775 default:
1776 UNREACHABLE();
1777 return no_condition;
1778 }
1779}
1780
1781
// Compares two strings through the generic CompareIC and branches on
// the outcome, which the IC leaves in eax (tested against zero).
void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
  Token::Value op = instr->op();
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  Handle<Code> ic = CompareIC::GetUninitialized(op);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = ComputeCompareCondition(op);
  __ test(eax, Operand(eax));

  EmitBranch(true_block, false_block, condition);
}
1795
1796
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001797static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001798 InstanceType from = instr->from();
1799 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001800 if (from == FIRST_TYPE) return to;
1801 ASSERT(from == to || to == LAST_TYPE);
1802 return from;
1803}
1804
1805
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001806static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001807 InstanceType from = instr->from();
1808 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001809 if (from == to) return equal;
1810 if (to == LAST_TYPE) return above_equal;
1811 if (from == FIRST_TYPE) return below_equal;
1812 UNREACHABLE();
1813 return equal;
1814}
1815
1816
Ben Murdochb0fe1622011-05-05 13:52:32 +01001817void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001818 Register input = ToRegister(instr->InputAt(0));
1819 Register temp = ToRegister(instr->TempAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001820
1821 int true_block = chunk_->LookupDestination(instr->true_block_id());
1822 int false_block = chunk_->LookupDestination(instr->false_block_id());
1823
1824 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1825
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001826 __ JumpIfSmi(input, false_label);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001827
Ben Murdochb8e0da22011-05-16 14:20:40 +01001828 __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1829 EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001830}
1831
1832
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001833void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1834 Register input = ToRegister(instr->InputAt(0));
1835 Register result = ToRegister(instr->result());
1836
1837 if (FLAG_debug_code) {
1838 __ AbortIfNotString(input);
1839 }
1840
1841 __ mov(result, FieldOperand(input, String::kHashFieldOffset));
1842 __ IndexFromHash(result, result);
1843}
1844
1845
Ben Murdochb0fe1622011-05-05 13:52:32 +01001846void LCodeGen::DoHasCachedArrayIndexAndBranch(
1847 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001848 Register input = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001849
1850 int true_block = chunk_->LookupDestination(instr->true_block_id());
1851 int false_block = chunk_->LookupDestination(instr->false_block_id());
1852
1853 __ test(FieldOperand(input, String::kHashFieldOffset),
1854 Immediate(String::kContainsCachedArrayIndexMask));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001855 EmitBranch(true_block, false_block, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001856}
1857
1858
// Emits the class-of test for |class_name| against |input|: jumps to
// is_true/is_false for early decisions, or falls through with the
// answer in the z flag (equal == matches). Trashes the temp registers,
// but not the input.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String>class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  ASSERT(!input.is(temp));
  ASSERT(!input.is(temp2));
  ASSERT(!temp.is(temp2));
  // Smis have no class.
  __ JumpIfSmi(input, is_false);

  if (class_name->IsEqualTo(CStrVector("Function"))) {
    // Assuming the following assertions, we can use the same compares to test
    // for both being a function type and being in the object type range.
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  FIRST_SPEC_OBJECT_TYPE + 1);
    STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                  LAST_SPEC_OBJECT_TYPE - 1);
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    // The callable types sit at both ends of the spec-object range, so
    // equality with either end means "Function".
    __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
    __ j(below, is_false);
    __ j(equal, is_true);
    __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE);
    __ j(equal, is_true);
  } else {
    // Faster code path to avoid two compares: subtract lower bound from the
    // actual type and do a signed compare with the width of the type range.
    __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
    __ movzx_b(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ sub(Operand(temp2), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ cmp(Operand(temp2), Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
                                     FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ j(above, is_false);
  }

  // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
  // Check if the constructor in the map is a function.
  __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
  // Objects with a non-function constructor have class 'Object'.
  __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
  if (class_name->IsEqualTo(CStrVector("Object"))) {
    __ j(not_equal, is_true);
  } else {
    __ j(not_equal, is_false);
  }

  // temp now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
  __ mov(temp, FieldOperand(temp,
                            SharedFunctionInfo::kInstanceClassNameOffset));
  // The class name we are testing against is a symbol because it's a literal.
  // The name in the constructor is a symbol because of the way the context is
  // booted. This routine isn't expected to work for random API-created
  // classes and it doesn't have to because you can't access it with natives
  // syntax. Since both sides are symbols it is sufficient to use an identity
  // comparison.
  __ cmp(temp, class_name);
  // End with the answer in the z flag.
}
1922
1923
Ben Murdochb0fe1622011-05-05 13:52:32 +01001924void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001925 Register input = ToRegister(instr->InputAt(0));
1926 Register temp = ToRegister(instr->TempAt(0));
1927 Register temp2 = ToRegister(instr->TempAt(1));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001928
Ben Murdochb0fe1622011-05-05 13:52:32 +01001929 Handle<String> class_name = instr->hydrogen()->class_name();
1930
1931 int true_block = chunk_->LookupDestination(instr->true_block_id());
1932 int false_block = chunk_->LookupDestination(instr->false_block_id());
1933
1934 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1935 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1936
1937 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1938
1939 EmitBranch(true_block, false_block, equal);
1940}
1941
1942
1943void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01001944 Register reg = ToRegister(instr->InputAt(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001945 int true_block = instr->true_block_id();
1946 int false_block = instr->false_block_id();
1947
1948 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1949 EmitBranch(true_block, false_block, equal);
1950}
1951
1952
// Implements the instanceof operator via the generic InstanceofStub.
// The stub returns zero in eax on a positive answer; the result
// register is then materialized as the true/false heap object.
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  // Object and function are in fixed registers defined by the stub.
  ASSERT(ToRegister(instr->context()).is(esi));
  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);

  Label true_value, done;
  __ test(eax, Operand(eax));
  __ j(zero, &true_value, Label::kNear);
  __ mov(ToRegister(instr->result()), factory()->false_value());
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ mov(ToRegister(instr->result()), factory()->true_value());
  __ bind(&done);
}
1968
1969
Ben Murdoch086aeea2011-05-13 15:57:08 +01001970void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1971 class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1972 public:
1973 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1974 LInstanceOfKnownGlobal* instr)
1975 : LDeferredCode(codegen), instr_(instr) { }
1976 virtual void Generate() {
Ben Murdoch2b4ba112012-01-20 14:57:15 +00001977 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001978 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001979 virtual LInstruction* instr() { return instr_; }
Ben Murdoch086aeea2011-05-13 15:57:08 +01001980 Label* map_check() { return &map_check_; }
Ben Murdoch086aeea2011-05-13 15:57:08 +01001981 private:
1982 LInstanceOfKnownGlobal* instr_;
1983 Label map_check_;
1984 };
1985
1986 DeferredInstanceOfKnownGlobal* deferred;
1987 deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1988
1989 Label done, false_result;
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001990 Register object = ToRegister(instr->InputAt(1));
Ben Murdochb8e0da22011-05-16 14:20:40 +01001991 Register temp = ToRegister(instr->TempAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01001992
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001993 // A Smi is not an instance of anything.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001994 __ JumpIfSmi(object, &false_result);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001995
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001996 // This is the inlined call site instanceof cache. The two occurences of the
Ben Murdoch086aeea2011-05-13 15:57:08 +01001997 // hole value will be patched to the last map/result pair generated by the
1998 // instanceof stub.
Ben Murdoch257744e2011-11-30 15:57:28 +00001999 Label cache_miss;
Ben Murdochb8e0da22011-05-16 14:20:40 +01002000 Register map = ToRegister(instr->TempAt(0));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002001 __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
2002 __ bind(deferred->map_check()); // Label for calculating code patching.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002003 Handle<JSGlobalPropertyCell> cache_cell =
2004 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
2005 __ cmp(map, Operand::Cell(cache_cell)); // Patched to cached map.
Ben Murdoch257744e2011-11-30 15:57:28 +00002006 __ j(not_equal, &cache_miss, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002007 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
Ben Murdoch086aeea2011-05-13 15:57:08 +01002008 __ jmp(&done);
2009
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002010 // The inlined call site cache did not match. Check for null and string
2011 // before calling the deferred code.
Ben Murdoch086aeea2011-05-13 15:57:08 +01002012 __ bind(&cache_miss);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002013 // Null is not an instance of anything.
Steve Block44f0eee2011-05-26 01:26:41 +01002014 __ cmp(object, factory()->null_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002015 __ j(equal, &false_result);
2016
2017 // String values are not instances of anything.
2018 Condition is_string = masm_->IsObjectStringType(object, temp, temp);
2019 __ j(is_string, &false_result);
2020
2021 // Go to the deferred code.
2022 __ jmp(deferred->entry());
2023
2024 __ bind(&false_result);
Steve Block44f0eee2011-05-26 01:26:41 +01002025 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002026
2027 // Here result has either true or false. Deferred code also produces true or
2028 // false object.
2029 __ bind(deferred->exit());
2030 __ bind(&done);
2031}
2032
2033
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002034void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2035 Label* map_check) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002036 PushSafepointRegistersScope scope(this);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002037
2038 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2039 flags = static_cast<InstanceofStub::Flags>(
2040 flags | InstanceofStub::kArgsInRegisters);
2041 flags = static_cast<InstanceofStub::Flags>(
2042 flags | InstanceofStub::kCallSiteInlineCheck);
2043 flags = static_cast<InstanceofStub::Flags>(
2044 flags | InstanceofStub::kReturnTrueFalseObject);
2045 InstanceofStub stub(flags);
2046
Ben Murdoch8b112d22011-06-08 16:22:53 +01002047 // Get the temp register reserved by the instruction. This needs to be a
2048 // register which is pushed last by PushSafepointRegisters as top of the
2049 // stack is used to pass the offset to the location of the map check to
2050 // the stub.
Ben Murdochb8e0da22011-05-16 14:20:40 +01002051 Register temp = ToRegister(instr->TempAt(0));
Ben Murdoch8b112d22011-06-08 16:22:53 +01002052 ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002053 __ LoadHeapObject(InstanceofStub::right(), instr->function());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002054 static const int kAdditionalDelta = 13;
Ben Murdoch086aeea2011-05-13 15:57:08 +01002055 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
Ben Murdoch086aeea2011-05-13 15:57:08 +01002056 __ mov(temp, Immediate(delta));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002057 __ StoreToSafepointRegisterSlot(temp, temp);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002058 CallCodeGeneric(stub.GetCode(),
2059 RelocInfo::CODE_TARGET,
2060 instr,
Ben Murdoch8b112d22011-06-08 16:22:53 +01002061 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002062 ASSERT(instr->HasDeoptimizationEnvironment());
2063 LEnvironment* env = instr->deoptimization_environment();
2064 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2065
Ben Murdoch086aeea2011-05-13 15:57:08 +01002066 // Put the result value into the eax slot and restore all registers.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002067 __ StoreToSafepointRegisterSlot(eax, eax);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002068}
2069
2070
Ben Murdochb0fe1622011-05-05 13:52:32 +01002071void LCodeGen::DoCmpT(LCmpT* instr) {
2072 Token::Value op = instr->op();
2073
2074 Handle<Code> ic = CompareIC::GetUninitialized(op);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002075 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002076
2077 Condition condition = ComputeCompareCondition(op);
Ben Murdoch257744e2011-11-30 15:57:28 +00002078 Label true_value, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002079 __ test(eax, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00002080 __ j(condition, &true_value, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002081 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00002082 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002083 __ bind(&true_value);
Steve Block44f0eee2011-05-26 01:26:41 +01002084 __ mov(ToRegister(instr->result()), factory()->true_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002085 __ bind(&done);
2086}
2087
2088
Ben Murdochb0fe1622011-05-05 13:52:32 +01002089void LCodeGen::DoReturn(LReturn* instr) {
2090 if (FLAG_trace) {
Steve Block1e0659c2011-05-24 12:43:12 +01002091 // Preserve the return value on the stack and rely on the runtime call
2092 // to return the value in the same register. We're leaving the code
2093 // managed by the register allocator and tearing down the frame, it's
2094 // safe to write to the context register.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002095 __ push(eax);
Steve Block1e0659c2011-05-24 12:43:12 +01002096 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002097 __ CallRuntime(Runtime::kTraceExit, 1);
2098 }
2099 __ mov(esp, ebp);
2100 __ pop(ebp);
Ben Murdoch257744e2011-11-30 15:57:28 +00002101 __ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002102}
2103
2104
Ben Murdoch8b112d22011-06-08 16:22:53 +01002105void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002106 Register result = ToRegister(instr->result());
2107 __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002108 if (instr->hydrogen()->RequiresHoleCheck()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002109 __ cmp(result, factory()->the_hole_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002110 DeoptimizeIf(equal, instr->environment());
2111 }
2112}
2113
2114
Ben Murdoch8b112d22011-06-08 16:22:53 +01002115void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2116 ASSERT(ToRegister(instr->context()).is(esi));
2117 ASSERT(ToRegister(instr->global_object()).is(eax));
2118 ASSERT(ToRegister(instr->result()).is(eax));
2119
2120 __ mov(ecx, instr->name());
2121 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2122 RelocInfo::CODE_TARGET_CONTEXT;
2123 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002124 CallCode(ic, mode, instr);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002125}
2126
2127
// Stores a global variable directly into its property cell, with an
// optional deopt when the cell holds the hole (deleted global).
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->value());
  Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell();

  // If the cell we are storing to contains the hole it could have
  // been deleted from the property dictionary. In that case, we need
  // to update the property details in the property dictionary to mark
  // it as no longer deleted. We deoptimize in that case.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ cmp(Operand::Cell(cell_handle), factory()->the_hole_value());
    DeoptimizeIf(equal, instr->environment());
  }

  // Store the value.
  __ mov(Operand::Cell(cell_handle), value);
  // Cells are always rescanned, so no write barrier here.
}
2145
2146
Ben Murdoch8b112d22011-06-08 16:22:53 +01002147void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2148 ASSERT(ToRegister(instr->context()).is(esi));
2149 ASSERT(ToRegister(instr->global_object()).is(edx));
2150 ASSERT(ToRegister(instr->value()).is(eax));
2151
2152 __ mov(ecx, instr->name());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002153 Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
Ben Murdoch8b112d22011-06-08 16:22:53 +01002154 ? isolate()->builtins()->StoreIC_Initialize_Strict()
2155 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002156 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002157}
2158
2159
Ben Murdochb8e0da22011-05-16 14:20:40 +01002160void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002161 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002162 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002163 __ mov(result, ContextOperand(context, instr->slot_index()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002164
2165 if (instr->hydrogen()->RequiresHoleCheck()) {
2166 __ cmp(result, factory()->the_hole_value());
2167 if (instr->hydrogen()->DeoptimizesOnHole()) {
2168 DeoptimizeIf(equal, instr->environment());
2169 } else {
2170 Label is_not_hole;
2171 __ j(not_equal, &is_not_hole, Label::kNear);
2172 __ mov(result, factory()->undefined_value());
2173 __ bind(&is_not_hole);
2174 }
2175 }
Steve Block1e0659c2011-05-24 12:43:12 +01002176}
2177
2178
// Stores a value into a context slot. A required hole check either
// deopts (checked bindings) or skips the assignment entirely; a write
// barrier is emitted when the stored value may be a heap object.
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());

  Label skip_assignment;

  Operand target = ContextOperand(context, instr->slot_index());
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ cmp(target, factory()->the_hole_value());
    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(equal, instr->environment());
    } else {
      __ j(not_equal, &skip_assignment, Label::kNear);
    }
  }

  __ mov(target, value);
  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Values statically known to be heap objects can skip the inline
    // smi check in the write barrier.
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    Register temp = ToRegister(instr->TempAt(0));
    int offset = Context::SlotOffset(instr->slot_index());
    __ RecordWriteContextSlot(context,
                              offset,
                              value,
                              temp,
                              kSaveFPRegs,
                              EMIT_REMEMBERED_SET,
                              check_needed);
  }

  __ bind(&skip_assignment);
}
2213
2214
Ben Murdochb0fe1622011-05-05 13:52:32 +01002215void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Steve Block44f0eee2011-05-26 01:26:41 +01002216 Register object = ToRegister(instr->object());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002217 Register result = ToRegister(instr->result());
2218 if (instr->hydrogen()->is_in_object()) {
2219 __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
2220 } else {
2221 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2222 __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
2223 }
2224}
2225
2226
// Loads a property of |object| described by map |type|'s descriptor for
// |name|: either a field (in-object or in the properties array) or a
// constant function. The descriptor lookup is required to succeed with
// type FIELD or CONSTANT_FUNCTION.
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
                                               Register object,
                                               Handle<Map> type,
                                               Handle<String> name) {
  LookupResult lookup(isolate());
  type->LookupInDescriptors(NULL, *name, &lookup);
  ASSERT(lookup.IsFound() &&
         (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
  if (lookup.type() == FIELD) {
    int index = lookup.GetLocalFieldIndexFromMap(*type);
    int offset = index * kPointerSize;
    if (index < 0) {
      // Negative property indices are in-object properties, indexed
      // from the end of the fixed part of the object.
      __ mov(result, FieldOperand(object, offset + type->instance_size()));
    } else {
      // Non-negative property indices are in the properties array.
      __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
      __ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
    }
  } else {
    // Constant-function property: materialize the known function object.
    Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
    __ LoadHeapObject(result, function);
  }
}
2252
2253
2254void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
2255 ASSERT(!operand->IsDoubleRegister());
2256 if (operand->IsConstantOperand()) {
2257 Handle<Object> object = ToHandle(LConstantOperand::cast(operand));
2258 if (object->IsSmi()) {
2259 __ Push(Handle<Smi>::cast(object));
2260 } else {
2261 __ PushHeapObject(Handle<HeapObject>::cast(object));
2262 }
2263 } else if (operand->IsRegister()) {
2264 __ push(ToRegister(operand));
2265 } else {
2266 __ push(ToOperand(operand));
Steve Block44f0eee2011-05-26 01:26:41 +01002267 }
2268}
2269
2270
// Loads a named property when hydrogen recorded a small set of candidate
// receiver maps. Each map is compared in turn; on a match the value is
// loaded inline. The final fallthrough either calls the generic LoadIC
// (need_generic) or deoptimizes.
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
  Register object = ToRegister(instr->object());
  Register result = ToRegister(instr->result());

  int map_count = instr->hydrogen()->types()->length();
  Handle<String> name = instr->hydrogen()->name();
  if (map_count == 0) {
    // No candidate maps at all: go straight to the generic IC.
    ASSERT(instr->hydrogen()->need_generic());
    __ mov(ecx, name);
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    CallCode(ic, RelocInfo::CODE_TARGET, instr);
  } else {
    Label done;
    // Check all but the last candidate map, loading inline on a hit.
    for (int i = 0; i < map_count - 1; ++i) {
      Handle<Map> map = instr->hydrogen()->types()->at(i);
      Label next;
      __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
      __ j(not_equal, &next, Label::kNear);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ jmp(&done, Label::kNear);
      __ bind(&next);
    }
    // Last candidate map: mismatch falls back to the IC or deoptimizes.
    Handle<Map> map = instr->hydrogen()->types()->last();
    __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
    if (instr->hydrogen()->need_generic()) {
      Label generic;
      __ j(not_equal, &generic, Label::kNear);
      EmitLoadFieldOrConstantFunction(result, object, map, name);
      __ jmp(&done, Label::kNear);
      __ bind(&generic);
      __ mov(ecx, name);
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallCode(ic, RelocInfo::CODE_TARGET, instr);
    } else {
      DeoptimizeIf(not_equal, instr->environment());
      EmitLoadFieldOrConstantFunction(result, object, map, name);
    }
    __ bind(&done);
  }
}
2311
2312
Ben Murdochb0fe1622011-05-05 13:52:32 +01002313void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002314 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002315 ASSERT(ToRegister(instr->object()).is(eax));
2316 ASSERT(ToRegister(instr->result()).is(eax));
2317
2318 __ mov(ecx, instr->name());
Steve Block44f0eee2011-05-26 01:26:41 +01002319 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002320 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002321}
2322
2323
// Loads |function|.prototype into the result register. Deoptimizes when
// the input is not a JSFunction or the prototype slot holds the hole.
// Handles both the initial-map indirection and the non-instance-prototype
// case, where the prototype lives in the map's constructor field.
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register temp = ToRegister(instr->TempAt(0));
  Register result = ToRegister(instr->result());

  // Check that the function really is a function.
  __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
  DeoptimizeIf(not_equal, instr->environment());

  // Check whether the function has an instance prototype.
  Label non_instance;
  __ test_b(FieldOperand(result, Map::kBitFieldOffset),
            1 << Map::kHasNonInstancePrototype);
  __ j(not_zero, &non_instance, Label::kNear);

  // Get the prototype or initial map from the function.
  __ mov(result,
         FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
  DeoptimizeIf(equal, instr->environment());

  // If the function does not have an initial map, we're done.
  Label done;
  __ CmpObjectType(result, MAP_TYPE, temp);
  __ j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
  __ jmp(&done, Label::kNear);

  // Non-instance prototype: Fetch prototype from constructor field
  // in the function's map.
  __ bind(&non_instance);
  __ mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  __ bind(&done);
}
2364
2365
// Loads the elements backing store of a JSObject. With --debug-code the
// result is verified to be a FixedArray, a COW FixedArray, or an array
// whose elements kind is FAST_ELEMENTS or one of the external kinds.
void LCodeGen::DoLoadElements(LLoadElements* instr) {
  Register result = ToRegister(instr->result());
  Register input = ToRegister(instr->InputAt(0));
  __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
  if (FLAG_debug_code) {
    Label done, ok, fail;
    __ cmp(FieldOperand(result, HeapObject::kMapOffset),
           Immediate(factory()->fixed_array_map()));
    __ j(equal, &done, Label::kNear);
    __ cmp(FieldOperand(result, HeapObject::kMapOffset),
           Immediate(factory()->fixed_cow_array_map()));
    __ j(equal, &done, Label::kNear);
    // Pick a scratch register guaranteed to differ from |result|;
    // preserve it across the check.
    Register temp((result.is(eax)) ? ebx : eax);
    __ push(temp);
    // Extract the elements kind from bit field 2 of the map.
    __ mov(temp, FieldOperand(result, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kBitField2Offset));
    __ and_(temp, Map::kElementsKindMask);
    __ shr(temp, Map::kElementsKindShift);
    __ cmp(temp, FAST_ELEMENTS);
    __ j(equal, &ok, Label::kNear);
    // Accept any kind in [FIRST, LAST] external array kind range.
    __ cmp(temp, FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
    __ j(less, &fail, Label::kNear);
    __ cmp(temp, LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
    __ j(less_equal, &ok, Label::kNear);
    __ bind(&fail);
    __ Abort("Check for fast or external elements failed.");
    __ bind(&ok);
    __ pop(temp);
    __ bind(&done);
  }
}
2397
2398
Steve Block44f0eee2011-05-26 01:26:41 +01002399void LCodeGen::DoLoadExternalArrayPointer(
2400 LLoadExternalArrayPointer* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002401 Register result = ToRegister(instr->result());
2402 Register input = ToRegister(instr->InputAt(0));
Steve Block44f0eee2011-05-26 01:26:41 +01002403 __ mov(result, FieldOperand(input,
2404 ExternalArray::kExternalPointerOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002405}
2406
2407
// Loads argument |index| from the arguments area addressed by |arguments|.
// Deoptimizes on out-of-range indices: after length -= index, below_equal
// (unsigned) fires when index >= length, including negative-index wraps.
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register length = ToRegister(instr->length());
  Operand index = ToOperand(instr->index());
  Register result = ToRegister(instr->result());

  __ sub(length, index);
  DeoptimizeIf(below_equal, instr->environment());

  // There are two words between the frame pointer and the last argument.
  // Subtracting from length accounts for one of them add one more.
  __ mov(result, Operand(arguments, length, times_4, kPointerSize));
}
2421
2422
// Loads an element from a fast-elements (FixedArray) backing store,
// optionally deoptimizing if the loaded value is the hole.
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
  Register result = ToRegister(instr->result());

  // Load the result.
  __ mov(result,
         BuildFastArrayOperand(instr->elements(), instr->key(),
                               FAST_ELEMENTS,
                               FixedArray::kHeaderSize - kHeapObjectTag));

  // Check for the hole value.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ cmp(result, factory()->the_hole_value());
    DeoptimizeIf(equal, instr->environment());
  }
}
2438
2439
// Loads a double element from a FixedDoubleArray. The hole is encoded as
// a NaN with a distinguished upper 32-bit word, so the element's upper
// word is compared against kHoleNanUpper32 and the code deoptimizes on a
// match before performing the actual 64-bit load.
void LCodeGen::DoLoadKeyedFastDoubleElement(
    LLoadKeyedFastDoubleElement* instr) {
  XMMRegister result = ToDoubleRegister(instr->result());

  // Address of the element's upper word: skip the lower word
  // (sizeof(kHoleNanLower32) bytes) past the element start.
  int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
      sizeof(kHoleNanLower32);
  Operand hole_check_operand = BuildFastArrayOperand(
      instr->elements(), instr->key(),
      FAST_DOUBLE_ELEMENTS,
      offset);
  __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
  DeoptimizeIf(equal, instr->environment());

  Operand double_load_operand = BuildFastArrayOperand(
      instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
      FixedDoubleArray::kHeaderSize - kHeapObjectTag);
  __ movdbl(result, double_load_operand);
}
2458
2459
// Builds a memory operand addressing element |key| of the array whose
// elements pointer is in |elements_pointer|. The element size is derived
// from |elements_kind|; |offset| is added verbatim (typically the array
// header size minus the heap-object tag).
Operand LCodeGen::BuildFastArrayOperand(
    LOperand* elements_pointer,
    LOperand* key,
    ElementsKind elements_kind,
    uint32_t offset) {
  Register elements_pointer_reg = ToRegister(elements_pointer);
  int shift_size = ElementsKindToShiftSize(elements_kind);
  if (key->IsConstantOperand()) {
    int constant_value = ToInteger32(LConstantOperand::cast(key));
    // Reject constants whose scaled displacement could overflow the
    // 32-bit addressing range.
    if (constant_value & 0xF0000000) {
      Abort("array index constant value too big");
    }
    return Operand(elements_pointer_reg,
                   constant_value * (1 << shift_size) + offset);
  } else {
    // Variable key: encode the element size (1 << shift_size bytes) as
    // the SIB scale factor.
    ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
    return Operand(elements_pointer_reg, ToRegister(key), scale_factor, offset);
  }
}
2479
2480
// Loads an element from an external (typed) array, dispatching on the
// elements kind. Float32 values are widened to double; unsigned int32
// values deoptimize when the sign bit is set (value would not fit the
// tagged integer range).
void LCodeGen::DoLoadKeyedSpecializedArrayElement(
    LLoadKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  Operand operand(BuildFastArrayOperand(instr->external_pointer(),
                                        instr->key(), elements_kind, 0));
  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
    XMMRegister result(ToDoubleRegister(instr->result()));
    __ movss(result, operand);
    __ cvtss2sd(result, result);
  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    __ movdbl(ToDoubleRegister(instr->result()), operand);
  } else {
    Register result(ToRegister(instr->result()));
    switch (elements_kind) {
      case EXTERNAL_BYTE_ELEMENTS:
        // Sign-extending byte load.
        __ movsx_b(result, operand);
        break;
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
        // Zero-extending byte load.
        __ movzx_b(result, operand);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
        __ movsx_w(result, operand);
        break;
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ movzx_w(result, operand);
        break;
      case EXTERNAL_INT_ELEMENTS:
        __ mov(result, operand);
        break;
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ mov(result, operand);
        __ test(result, Operand(result));
        // TODO(danno): we could be more clever here, perhaps having a special
        // version of the stub that detects if the overflow case actually
        // happens, and generate code that returns a double rather than int.
        DeoptimizeIf(negative, instr->environment());
        break;
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_SMI_ONLY_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        // Float/double kinds are handled above; the remaining kinds are
        // not valid for this instruction.
        UNREACHABLE();
        break;
    }
  }
}
2531
2532
Ben Murdochb0fe1622011-05-05 13:52:32 +01002533void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002534 ASSERT(ToRegister(instr->context()).is(esi));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002535 ASSERT(ToRegister(instr->object()).is(edx));
2536 ASSERT(ToRegister(instr->key()).is(eax));
2537
Steve Block44f0eee2011-05-26 01:26:41 +01002538 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002539 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002540}
2541
2542
// Computes the frame pointer of the frame holding the actual arguments:
// ebp itself, or the caller frame when an arguments adaptor frame sits
// directly below the current frame.
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());

  // Check for arguments adapter frame.
  Label done, adapted;
  __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
  // Adaptor frames are identified by the ARGUMENTS_ADAPTOR sentinel in
  // their context slot.
  __ cmp(Operand(result),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adapted, Label::kNear);

  // No arguments adaptor frame.
  __ mov(result, Operand(ebp));
  __ jmp(&done, Label::kNear);

  // Arguments adaptor frame present.
  __ bind(&adapted);
  __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Result is the frame pointer for the frame if not adapted and for the real
  // frame below the adaptor frame if adapted.
  __ bind(&done);
}
2566
2567
// Computes the actual argument count: the static parameter count when no
// arguments adaptor frame is present, otherwise the (smi-untagged) length
// field of the adaptor frame.
void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Operand elem = ToOperand(instr->InputAt(0));
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  __ cmp(ebp, elem);
  __ mov(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done, Label::kNear);

  // Arguments adaptor frame present. Get argument length from there.
  __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(result, Operand(result,
                         ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(result);

  // Argument length is in result register.
  __ bind(&done);
}
2588
2589
// Wraps the receiver for a normal (non-strict, non-native) function call:
// null and undefined are replaced with the global receiver object, other
// non-object values deoptimize; strict-mode and native functions receive
// the value unchanged.
void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register scratch = ToRegister(instr->TempAt(0));

  // If the receiver is null or undefined, we have to pass the global
  // object as a receiver to normal functions. Values have to be
  // passed unchanged to builtins and strict-mode functions.
  Label global_object, receiver_ok;

  // Do not transform the receiver to object for strict mode
  // functions.
  __ mov(scratch,
         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
            1 << SharedFunctionInfo::kStrictModeBitWithinByte);
  __ j(not_equal, &receiver_ok, Label::kNear);

  // Do not transform the receiver to object for builtins.
  __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
            1 << SharedFunctionInfo::kNativeBitWithinByte);
  __ j(not_equal, &receiver_ok, Label::kNear);

  // Normal function. Replace undefined or null with global receiver.
  __ cmp(receiver, factory()->null_value());
  __ j(equal, &global_object, Label::kNear);
  __ cmp(receiver, factory()->undefined_value());
  __ j(equal, &global_object, Label::kNear);

  // The receiver should be a JS object.
  __ test(receiver, Immediate(kSmiTagMask));
  DeoptimizeIf(equal, instr->environment());
  __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch);
  DeoptimizeIf(below, instr->environment());
  __ jmp(&receiver_ok, Label::kNear);

  __ bind(&global_object);
  // TODO(kmillikin): We have a hydrogen value for the global object. See
  // if it's better to use it than to explicitly fetch it from the context
  // here.
  __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
  __ mov(receiver,
         FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
  __ bind(&receiver_ok);
}
2636
2637
// Implements function.apply-style invocation: pushes up to kArgumentsLimit
// arguments from |elements| onto the stack (highest index first) and
// invokes |function| with the dynamic argument count in eax.
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  ASSERT(receiver.is(eax));  // Used for parameter count.
  ASSERT(function.is(edi));  // Required by InvokeFunction.
  ASSERT(ToRegister(instr->result()).is(eax));

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmp(length, kArgumentsLimit);
  DeoptimizeIf(above, instr->environment());

  // Push the receiver, then keep the argument count in the (now free)
  // receiver register eax for the invoke below.
  __ push(receiver);
  __ mov(receiver, length);

  // Loop through the arguments pushing them onto the execution
  // stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ test(length, Operand(length));
  __ j(zero, &invoke, Label::kNear);
  __ bind(&loop);
  __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
  __ dec(length);
  __ j(not_zero, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  ParameterCount actual(eax);
  __ InvokeFunction(function, actual, CALL_FUNCTION,
                    safepoint_generator, CALL_AS_METHOD);
}
2678
2679
2680void LCodeGen::DoPushArgument(LPushArgument* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002681 LOperand* argument = instr->InputAt(0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002682 EmitPushTaggedOperand(argument);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002683}
2684
2685
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002686void LCodeGen::DoThisFunction(LThisFunction* instr) {
2687 Register result = ToRegister(instr->result());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002688 __ LoadHeapObject(result, instr->hydrogen()->closure());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002689}
2690
2691
Steve Block1e0659c2011-05-24 12:43:12 +01002692void LCodeGen::DoContext(LContext* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002693 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002694 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
2695}
2696
2697
2698void LCodeGen::DoOuterContext(LOuterContext* instr) {
2699 Register context = ToRegister(instr->context());
2700 Register result = ToRegister(instr->result());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002701 __ mov(result,
2702 Operand(context, Context::SlotOffset(Context::PREVIOUS_INDEX)));
Steve Block1e0659c2011-05-24 12:43:12 +01002703}
2704
2705
// Calls the runtime to declare a batch of globals. Arguments are pushed
// in the order Runtime::kDeclareGlobals expects: context, the pairs
// array, and the declaration flags (as a smi).
void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(esi));
  __ push(esi);  // The context is the first argument.
  __ push(Immediate(instr->hydrogen()->pairs()));
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags())));
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);
}
2713
2714
Steve Block1e0659c2011-05-24 12:43:12 +01002715void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2716 Register context = ToRegister(instr->context());
2717 Register result = ToRegister(instr->result());
2718 __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002719}
2720
2721
2722void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002723 Register global = ToRegister(instr->global());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002724 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002725 __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002726}
2727
2728
// Emits a call to a function that is known at compile time.
// If no arguments adaption is needed, or the static arity already matches
// the function's formal parameter count, the call jumps straight to the
// code entry (with a possible context switch); otherwise it is routed
// through InvokeFunction and the arguments adaptor.
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int arity,
                                 LInstruction* instr,
                                 CallKind call_kind) {
  bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
      function->shared()->formal_parameter_count() == arity;

  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());

  if (can_invoke_directly) {
    __ LoadHeapObject(edi, function);

    // Change context if needed.
    bool change_context =
        (info()->closure()->context() != function->context()) ||
        scope()->contains_with() ||
        (scope()->num_heap_slots() > 0);

    if (change_context) {
      __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
    } else {
      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    }

    // Set eax to arguments count if adaption is not needed. Assumes that eax
    // is available to write to at this point.
    if (!function->NeedsArgumentsAdaption()) {
      __ mov(eax, arity);
    }

    // Invoke function directly.
    __ SetCallKind(ecx, call_kind);
    if (*function == *info()->closure()) {
      // Self-recursive call: use the dedicated self-call sequence.
      __ CallSelf();
    } else {
      __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
    }
    RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
  } else {
    // We need to adapt arguments.
    SafepointGenerator generator(
        this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(arity);
    __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
  }
}
2776
2777
2778void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2779 ASSERT(ToRegister(instr->result()).is(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00002780 CallKnownFunction(instr->function(),
2781 instr->arity(),
2782 instr,
2783 CALL_AS_METHOD);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002784}
2785
2786
2787void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002788 Register input_reg = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002789 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01002790 factory()->heap_number_map());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002791 DeoptimizeIf(not_equal, instr->environment());
2792
2793 Label done;
2794 Register tmp = input_reg.is(eax) ? ecx : eax;
2795 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2796
2797 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002798 PushSafepointRegistersScope scope(this);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002799
2800 Label negative;
2801 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01002802 // Check the sign of the argument. If the argument is positive, just
2803 // return it. We do not need to patch the stack since |input| and
2804 // |result| are the same register and |input| will be restored
2805 // unchanged by popping safepoint registers.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002806 __ test(tmp, Immediate(HeapNumber::kSignMask));
2807 __ j(not_zero, &negative);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002808 __ jmp(&done);
2809
2810 __ bind(&negative);
2811
2812 Label allocated, slow;
2813 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2814 __ jmp(&allocated);
2815
2816 // Slow case: Call the runtime system to do the number allocation.
2817 __ bind(&slow);
2818
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002819 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
2820 instr, instr->context());
Ben Murdoch8b112d22011-06-08 16:22:53 +01002821
Ben Murdochb0fe1622011-05-05 13:52:32 +01002822 // Set the pointer to the new heap number in tmp.
2823 if (!tmp.is(eax)) __ mov(tmp, eax);
2824
2825 // Restore input_reg after call to runtime.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002826 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002827
2828 __ bind(&allocated);
2829 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2830 __ and_(tmp2, ~HeapNumber::kSignMask);
2831 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2832 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2833 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002834 __ StoreToSafepointRegisterSlot(input_reg, tmp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002835
Steve Block1e0659c2011-05-24 12:43:12 +01002836 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002837}
2838
2839
// Emits in-place integer Math.abs on the input register: negates the value
// if it is negative. Deoptimizes when the result is still negative after
// neg, which happens only for kMinInt (-2^31 has no positive int32
// counterpart).
void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
  Register input_reg = ToRegister(instr->value());
  __ test(input_reg, Operand(input_reg));
  Label is_positive;
  __ j(not_sign, &is_positive);
  __ neg(input_reg);
  __ test(input_reg, Operand(input_reg));
  DeoptimizeIf(negative, instr->environment());  // kMinInt overflow.
  __ bind(&is_positive);
}
2850
2851
// Emits Math.abs. Input and result share a register. Three cases by
// representation: double (bit-mask trick below), int32 (EmitIntegerMathAbs),
// and tagged (smi fast path inline, heap-number case deferred).
void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
                                    LUnaryMathOperation* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
    virtual LInstruction* instr() { return instr_; }
   private:
    LUnaryMathOperation* instr_;
  };

  ASSERT(instr->value()->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();

  if (r.IsDouble()) {
    XMMRegister scratch = xmm0;
    XMMRegister input_reg = ToDoubleRegister(instr->value());
    // scratch = 0.0 - input; x AND -x differ only in the sign bit, so the
    // bitwise AND clears the sign bit, yielding |x|.
    __ xorps(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ pand(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else {  // Tagged case.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->value());
    // Smi check.
    __ JumpIfNotSmi(input_reg, deferred->entry());
    // Smi values can be handled like int32s (note: a smi's payload fits in
    // 31 bits, so the kMinInt deopt in EmitIntegerMathAbs cannot fire here).
    EmitIntegerMathAbs(instr);
    __ bind(deferred->exit());
  }
}
2888
2889
// Emits Math.floor(double) -> int32. Uses roundsd when SSE4.1 is available;
// otherwise falls back to truncation, which is only valid for non-negative
// inputs, so negative inputs deoptimize. Both paths deoptimize on -0 (when
// the hydrogen value requires it) and on results that do not fit in int32
// (cvttsd2si signals overflow by producing 0x80000000).
void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->value());

  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatures::Scope scope(SSE4_1);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Deoptimize on negative zero.
      Label non_zero;
      __ xorps(xmm_scratch, xmm_scratch);  // Zero the register.
      __ ucomisd(input_reg, xmm_scratch);
      __ j(not_equal, &non_zero, Label::kNear);
      // Input compares equal to 0.0: distinguish -0 via its sign bit.
      __ movmskpd(output_reg, input_reg);
      __ test(output_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
      __ bind(&non_zero);
    }
    __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
    __ cvttsd2si(output_reg, Operand(xmm_scratch));
    // Overflow is signalled with minint.
    __ cmp(output_reg, 0x80000000u);
    DeoptimizeIf(equal, instr->environment());
  } else {
    Label done;
    // Deoptimize on negative numbers.
    __ xorps(xmm_scratch, xmm_scratch);  // Zero the register.
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below, instr->environment());

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Check for negative zero.
      Label positive_sign;
      __ j(above, &positive_sign, Label::kNear);
      __ movmskpd(output_reg, input_reg);
      __ test(output_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr->environment());
      __ Set(output_reg, Immediate(0));
      __ jmp(&done, Label::kNear);
      __ bind(&positive_sign);
    }

    // Use truncating instruction (OK because input is positive).
    __ cvttsd2si(output_reg, Operand(input_reg));

    // Overflow is signalled with minint.
    __ cmp(output_reg, 0x80000000u);
    DeoptimizeIf(equal, instr->environment());
    __ bind(&done);
  }
}
2941
// Emits Math.round(double) -> int32 as floor(value + 0.5) for inputs
// >= 0.5, and 0 for inputs in [-0.5, 0.5[ (deoptimizing for -0 / negative
// inputs when the hydrogen value bails out on minus zero). Overflowing
// results deoptimize (cvttsd2si signals overflow with 0x80000000).
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
  XMMRegister xmm_scratch = xmm0;
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->value());

  Label below_half, done;
  // xmm_scratch = 0.5
  ExternalReference one_half = ExternalReference::address_of_one_half();
  __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
  __ ucomisd(xmm_scratch, input_reg);
  // NOTE(review): "above" here also takes the NaN (unordered) path to
  // below_half, where NaN input ends up deoptimizing or returning 0 —
  // presumably intentional; confirm against the hydrogen contract.
  __ j(above, &below_half);
  // xmm_scratch = input + 0.5
  __ addsd(xmm_scratch, input_reg);

  // Compute Math.floor(value + 0.5).
  // Use truncating instruction (OK because input is positive).
  __ cvttsd2si(output_reg, Operand(xmm_scratch));

  // Overflow is signalled with minint.
  __ cmp(output_reg, 0x80000000u);
  DeoptimizeIf(equal, instr->environment());
  __ jmp(&done);

  __ bind(&below_half);

  // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
  // we can ignore the difference between a result of -0 and +0.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // If the sign is positive, we return +0.
    __ movmskpd(output_reg, input_reg);
    __ test(output_reg, Immediate(1));
    DeoptimizeIf(not_zero, instr->environment());
  } else {
    // If the input is >= -0.5, we return +0.
    // 0xBF000000 is -0.5 as an IEEE-754 single; widen it to double.
    __ mov(output_reg, Immediate(0xBF000000));
    __ movd(xmm_scratch, Operand(output_reg));
    __ cvtss2sd(xmm_scratch, xmm_scratch);
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(below, instr->environment());
  }
  __ Set(output_reg, Immediate(0));
  __ bind(&done);
}
2985
2986
2987void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002988 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002989 ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2990 __ sqrtsd(input_reg, input_reg);
2991}
2992
2993
// Emits Math.pow(x, 0.5). Cannot be lowered to plain sqrt because the spec
// treats -Infinity specially (see comment below); also normalizes -0 to +0
// before taking the square root so that pow(-0, 0.5) == +0.
void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
  XMMRegister xmm_scratch = xmm0;
  XMMRegister input_reg = ToDoubleRegister(instr->value());
  Register scratch = ToRegister(instr->temp());
  ASSERT(ToDoubleRegister(instr->result()).is(input_reg));

  // Note that according to ECMA-262 15.8.2.13:
  // Math.pow(-Infinity, 0.5) == Infinity
  // Math.sqrt(-Infinity) == NaN
  Label done, sqrt;
  // Check base for -Infinity. According to IEEE-754, single-precision
  // -Infinity has the highest 9 bits set and the lowest 23 bits cleared.
  __ mov(scratch, 0xFF800000);
  __ movd(xmm_scratch, scratch);
  __ cvtss2sd(xmm_scratch, xmm_scratch);
  __ ucomisd(input_reg, xmm_scratch);
  // Comparing -Infinity with NaN results in "unordered", which sets the
  // zero flag as if both were equal. However, it also sets the carry flag.
  __ j(not_equal, &sqrt, Label::kNear);
  __ j(carry, &sqrt, Label::kNear);
  // If input is -Infinity, return Infinity.
  // (0.0 - (-Infinity) == +Infinity.)
  __ xorps(input_reg, input_reg);
  __ subsd(input_reg, xmm_scratch);
  __ jmp(&done, Label::kNear);

  // Square root.
  __ bind(&sqrt);
  __ xorps(xmm_scratch, xmm_scratch);
  __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
  __ sqrtsd(input_reg, input_reg);
  __ bind(&done);
}
3026
3027
// Emits Math.pow by delegating to MathPowStub, choosing the stub variant
// by the exponent's representation (tagged / int32 / double). A tagged
// exponent must be a smi or heap number, otherwise we deoptimize.
void LCodeGen::DoPower(LPower* instr) {
  Representation exponent_type = instr->hydrogen()->right()->representation();
  // Having marked this as a call, we can use any registers.
  // Just make sure that the input/output registers are the expected ones.
  ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
         ToDoubleRegister(instr->InputAt(1)).is(xmm1));
  ASSERT(!instr->InputAt(1)->IsRegister() ||
         ToRegister(instr->InputAt(1)).is(eax));
  ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
  ASSERT(ToDoubleRegister(instr->result()).is(xmm3));

  if (exponent_type.IsTagged()) {
    Label no_deopt;
    __ JumpIfSmi(eax, &no_deopt);
    __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
    DeoptimizeIf(not_equal, instr->environment());
    __ bind(&no_deopt);
    MathPowStub stub(MathPowStub::TAGGED);
    __ CallStub(&stub);
  } else if (exponent_type.IsInteger32()) {
    MathPowStub stub(MathPowStub::INTEGER);
    __ CallStub(&stub);
  } else {
    ASSERT(exponent_type.IsDouble());
    MathPowStub stub(MathPowStub::DOUBLE);
    __ CallStub(&stub);
  }
}
Ben Murdochb0fe1622011-05-05 13:52:32 +01003056
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003057
// Emits Math.random as an inline multiply-with-carry PRNG over two 32-bit
// seeds stored in the global context, falling back to a deferred runtime
// call to initialize the seeds on first use. The 32 random bits are then
// converted to a double in [0, 1).
void LCodeGen::DoRandom(LRandom* instr) {
  // Deferred slow path: seeds not yet initialized.
  class DeferredDoRandom: public LDeferredCode {
   public:
    DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LRandom* instr_;
  };

  DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);

  // Having marked this instruction as a call we can use any
  // registers.
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  // Assert that the register size is indeed the size of each seed.
  static const int kSeedSize = sizeof(uint32_t);
  STATIC_ASSERT(kPointerSize == kSeedSize);

  __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
  static const int kRandomSeedOffset =
      FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
  __ mov(ebx, FieldOperand(eax, kRandomSeedOffset));
  // ebx: FixedArray of the global context's random seeds

  // Load state[0].
  __ mov(ecx, FieldOperand(ebx, ByteArray::kHeaderSize));
  // If state[0] == 0, call runtime to initialize seeds.
  __ test(ecx, ecx);
  __ j(zero, deferred->entry());
  // Load state[1].
  __ mov(eax, FieldOperand(ebx, ByteArray::kHeaderSize + kSeedSize));
  // ecx: state[0]
  // eax: state[1]

  // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
  __ movzx_w(edx, ecx);
  __ imul(edx, edx, 18273);
  __ shr(ecx, 16);
  __ add(ecx, edx);
  // Save state[0].
  __ mov(FieldOperand(ebx, ByteArray::kHeaderSize), ecx);

  // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
  __ movzx_w(edx, eax);
  __ imul(edx, edx, 36969);
  __ shr(eax, 16);
  __ add(eax, edx);
  // Save state[1].
  __ mov(FieldOperand(ebx, ByteArray::kHeaderSize + kSeedSize), eax);

  // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
  __ shl(ecx, 14);
  __ and_(eax, Immediate(0x3FFFF));
  __ add(eax, ecx);

  __ bind(deferred->exit());
  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
  __ movd(xmm2, ebx);
  __ movd(xmm1, eax);
  __ cvtss2sd(xmm2, xmm2);
  __ xorps(xmm1, xmm2);
  __ subsd(xmm1, xmm2);
}
3127
3128
// Deferred slow path for DoRandom: calls the C function that initializes
// the seeds and produces the first 32 random bits (returned in eax, as
// expected by the code following deferred->exit()).
void LCodeGen::DoDeferredRandom(LRandom* instr) {
  __ PrepareCallCFunction(1, ebx);
  __ mov(Operand(esp, 0), eax);  // Argument: the global object (in eax).
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
  // Return value is in eax.
}
3135
3136
// Emits Math.log(double) in place. Special cases: input < 0 or NaN yields
// the canonical NaN; input == 0 yields -Infinity (bit pattern built on the
// stack); positive input uses the x87 fyl2x sequence (ln2 * log2(x)),
// shuttling the value between XMM and x87 via the stack.
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
  ASSERT(instr->value()->Equals(instr->result()));
  XMMRegister input_reg = ToDoubleRegister(instr->value());
  Label positive, done, zero;
  __ xorps(xmm0, xmm0);
  __ ucomisd(input_reg, xmm0);
  __ j(above, &positive, Label::kNear);
  __ j(equal, &zero, Label::kNear);
  // Negative or NaN input: result is the canonical non-hole NaN.
  ExternalReference nan =
      ExternalReference::address_of_canonical_non_hole_nan();
  __ movdbl(input_reg, Operand::StaticVariable(nan));
  __ jmp(&done, Label::kNear);
  __ bind(&zero);
  // Build -Infinity (0xFFF00000_00000000) on the stack and load it.
  __ push(Immediate(0xFFF00000));
  __ push(Immediate(0));
  __ movdbl(input_reg, Operand(esp, 0));
  __ add(Operand(esp), Immediate(kDoubleSize));
  __ jmp(&done, Label::kNear);
  __ bind(&positive);
  // x87 computes ln(x) as fyl2x with ST(1) = ln(2): ln2 * log2(x).
  __ fldln2();
  __ sub(Operand(esp), Immediate(kDoubleSize));
  __ movdbl(Operand(esp, 0), input_reg);
  __ fld_d(Operand(esp, 0));
  __ fyl2x();
  __ fstp_d(Operand(esp, 0));
  __ movdbl(input_reg, Operand(esp, 0));
  __ add(Operand(esp), Immediate(kDoubleSize));
  __ bind(&done);
}
3166
3167
// Emits Math.tan via the transcendental-cache stub (untagged double
// variant); the result is produced in xmm1.
void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3174
3175
// Emits Math.cos via the transcendental-cache stub (untagged double
// variant); the result is produced in xmm1.
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3182
3183
// Emits Math.sin via the transcendental-cache stub (untagged double
// variant); the result is produced in xmm1.
void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
  ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::UNTAGGED);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3190
3191
// Dispatches a unary math LIR instruction to the dedicated emitter for its
// operation kind. Unknown kinds are a compiler bug (UNREACHABLE).
void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
  switch (instr->op()) {
    case kMathAbs:
      DoMathAbs(instr);
      break;
    case kMathFloor:
      DoMathFloor(instr);
      break;
    case kMathRound:
      DoMathRound(instr);
      break;
    case kMathSqrt:
      DoMathSqrt(instr);
      break;
    case kMathCos:
      DoMathCos(instr);
      break;
    case kMathSin:
      DoMathSin(instr);
      break;
    case kMathTan:
      DoMathTan(instr);
      break;
    case kMathLog:
      DoMathLog(instr);
      break;

    default:
      UNREACHABLE();
  }
}
3223
3224
// Emits an invocation of a function object held in edi, going through the
// macro-assembler's InvokeFunction (which handles arguments adaption). A
// lazy-deopt safepoint is recorded via the SafepointGenerator.
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->function()).is(edi));
  ASSERT(instr->HasPointerMap());
  ASSERT(instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  SafepointGenerator generator(
      this, pointers, Safepoint::kLazyDeopt);
  ParameterCount count(instr->arity());
  __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
}
3237
3238
// Emits a keyed call (obj[expr](...)) through the keyed-call IC stub.
// Fixed register contract: context in esi, key in ecx, result in eax.
void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->key()).is(ecx));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
3249
3250
// Emits a named call (obj.name(...)) through the call IC stub; the property
// name is passed in ecx, result comes back in eax.
void LCodeGen::DoCallNamed(LCallNamed* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ mov(ecx, instr->name());
  CallCode(ic, mode, instr);
}
3262
3263
// Emits a call to a function value (expr(...)) through CallFunctionStub.
// Fixed register contract: context in esi, callee in edi, result in eax.
void LCodeGen::DoCallFunction(LCallFunction* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->function()).is(edi));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3273
3274
// Emits a call to a global-scope function through the call IC using the
// CODE_TARGET_CONTEXT reloc mode (receiver is the global object).
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->result()).is(eax));

  int arity = instr->arity();
  RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
  __ mov(ecx, instr->name());
  CallCode(ic, mode, instr);
}
3286
3287
// Emits a direct call to a global function known at compile time
// (CALL_AS_FUNCTION: no explicit receiver).
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
  ASSERT(ToRegister(instr->result()).is(eax));
  CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
3292
3293
// Emits a constructor call (new expr(...)) via CallConstructStub, with the
// constructor in edi and the argument count in eax; result in eax.
void LCodeGen::DoCallNew(LCallNew* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->constructor()).is(edi));
  ASSERT(ToRegister(instr->result()).is(eax));

  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
  __ Set(eax, Immediate(instr->arity()));
  CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}
3303
3304
// Emits a call into the V8 runtime for the instruction's runtime function.
void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  CallRuntime(instr->function(), instr->arity(), instr);
}
3308
3309
// Emits a store to a named field at a known offset, either in-object or in
// the out-of-line properties backing store. Optionally writes a new map
// first (field-adding transition) and emits a write barrier when the stored
// value may be a heap object. The smi check inside the barrier is skipped
// when the value is statically known to be a heap object.
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  Register object = ToRegister(instr->object());
  Register value = ToRegister(instr->value());
  int offset = instr->offset();

  if (!instr->transition().is_null()) {
    // Install the transition map before the store.
    __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
  }

  // Do the store.
  HType type = instr->hydrogen()->value()->type();
  SmiCheck check_needed =
      type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
  if (instr->is_in_object()) {
    __ mov(FieldOperand(object, offset), value);
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      Register temp = ToRegister(instr->TempAt(0));
      // Update the write barrier for the object for in-object properties.
      __ RecordWriteField(object,
                          offset,
                          value,
                          temp,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  } else {
    Register temp = ToRegister(instr->TempAt(0));
    __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(temp, offset), value);
    if (instr->hydrogen()->NeedsWriteBarrier()) {
      // Update the write barrier for the properties array.
      // object is used as a scratch register.
      __ RecordWriteField(temp,
                          offset,
                          value,
                          object,
                          kSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          check_needed);
    }
  }
}
3353
3354
// Emits a generic named store through the StoreIC stub (strict or
// non-strict variant depending on the caller's language mode). Fixed
// register contract: context in esi, receiver in edx, value in eax, name
// loaded into ecx.
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->object()).is(edx));
  ASSERT(ToRegister(instr->value()).is(eax));

  __ mov(ecx, instr->name());
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->StoreIC_Initialize_Strict()
      : isolate()->builtins()->StoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
3366
3367
3368void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003369 if (instr->index()->IsConstantOperand()) {
3370 __ cmp(ToOperand(instr->length()),
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003371 Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003372 DeoptimizeIf(below_equal, instr->environment());
3373 } else {
3374 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
3375 DeoptimizeIf(above_equal, instr->environment());
3376 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003377}
3378
3379
// Emits a keyed store into an external (typed) array, choosing the store
// width/instruction by elements kind: float stores narrow via cvtsd2ss,
// double stores use movdbl, and integer kinds store 1/2/4 bytes. Non-
// external kinds can never reach this instruction (UNREACHABLE).
void LCodeGen::DoStoreKeyedSpecializedArrayElement(
    LStoreKeyedSpecializedArrayElement* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  Operand operand(BuildFastArrayOperand(instr->external_pointer(),
                                        instr->key(), elements_kind, 0));
  if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
    // Narrow the double value to single precision before storing.
    __ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
    __ movss(operand, xmm0);
  } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
    __ movdbl(operand, ToDoubleRegister(instr->value()));
  } else {
    Register value = ToRegister(instr->value());
    switch (elements_kind) {
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
        __ mov_b(operand, value);
        break;
      case EXTERNAL_SHORT_ELEMENTS:
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
        __ mov_w(operand, value);
        break;
      case EXTERNAL_INT_ELEMENTS:
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
        __ mov(operand, value);
        break;
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_SMI_ONLY_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case DICTIONARY_ELEMENTS:
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNREACHABLE();
        break;
    }
  }
}
3418
3419
// Emits a keyed store into a fast (FixedArray-backed) elements store, with
// either a constant or register key, followed by a write barrier when the
// stored value may be a heap object. For the barrier, the key register is
// repurposed to hold the address of the modified element.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // Constant-key stores never need a barrier here (asserted), since the
    // barrier path below requires the key in a register.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements, offset), value);
  } else {
    __ mov(FieldOperand(elements,
                        key,
                        times_pointer_size,
                        FixedArray::kHeaderSize),
           value);
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    HType type = instr->hydrogen()->value()->type();
    SmiCheck check_needed =
        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    // Compute address of modified element and store it into key register.
    __ lea(key,
           FieldOperand(elements,
                        key,
                        times_pointer_size,
                        FixedArray::kHeaderSize));
    __ RecordWrite(elements,
                   key,
                   value,
                   kSaveFPRegs,
                   EMIT_REMEMBERED_SET,
                   check_needed);
  }
}
3458
3459
// Emits a keyed store into a FixedDoubleArray. NaN values are first
// canonicalized (so their bit pattern can never collide with the hole
// NaN used to mark absent elements), then the double is stored directly.
void LCodeGen::DoStoreKeyedFastDoubleElement(
    LStoreKeyedFastDoubleElement* instr) {
  XMMRegister value = ToDoubleRegister(instr->value());
  Label have_value;

  // ucomisd of a NaN against itself sets the parity flag.
  __ ucomisd(value, value);
  __ j(parity_odd, &have_value);  // NaN.

  // Replace any NaN with the canonical non-hole NaN bit pattern.
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
  __ bind(&have_value);

  Operand double_store_operand = BuildFastArrayOperand(
      instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
      FixedDoubleArray::kHeaderSize - kHeapObjectTag);
  __ movdbl(double_store_operand, value);
}
3478
3479
// Generic keyed store: dispatches to the KeyedStoreIC stub.  The register
// constraints (esi/edx/ecx/eax) are the IC's fixed calling convention.
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  ASSERT(ToRegister(instr->object()).is(edx));
  ASSERT(ToRegister(instr->key()).is(ecx));
  ASSERT(ToRegister(instr->value()).is(eax));

  // Strict-mode code gets the strict IC variant.
  Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
      ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
      : isolate()->builtins()->KeyedStoreIC_Initialize();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
3491
3492
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003493void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
3494 Register object_reg = ToRegister(instr->object());
3495 Register new_map_reg = ToRegister(instr->new_map_reg());
3496
3497 Handle<Map> from_map = instr->original_map();
3498 Handle<Map> to_map = instr->transitioned_map();
3499 ElementsKind from_kind = from_map->elements_kind();
3500 ElementsKind to_kind = to_map->elements_kind();
3501
3502 Label not_applicable;
3503 __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
3504 __ j(not_equal, &not_applicable);
3505 __ mov(new_map_reg, to_map);
3506 if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
3507 Register object_reg = ToRegister(instr->object());
3508 __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
3509 // Write barrier.
3510 ASSERT_NE(instr->temp_reg(), NULL);
3511 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
3512 ToRegister(instr->temp_reg()), kDontSaveFPRegs);
3513 } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
3514 to_kind == FAST_DOUBLE_ELEMENTS) {
3515 Register fixed_object_reg = ToRegister(instr->temp_reg());
3516 ASSERT(fixed_object_reg.is(edx));
3517 ASSERT(new_map_reg.is(ebx));
3518 __ mov(fixed_object_reg, object_reg);
3519 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
3520 RelocInfo::CODE_TARGET, instr);
3521 } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
3522 Register fixed_object_reg = ToRegister(instr->temp_reg());
3523 ASSERT(fixed_object_reg.is(edx));
3524 ASSERT(new_map_reg.is(ebx));
3525 __ mov(fixed_object_reg, object_reg);
3526 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(),
3527 RelocInfo::CODE_TARGET, instr);
3528 } else {
3529 UNREACHABLE();
3530 }
3531 __ bind(&not_applicable);
3532}
3533
3534
// Loads the character code at instr->index() of instr->string() into the
// result register.  StringCharLoadGenerator emits the inline fast path and
// jumps to the deferred runtime call for cases it cannot handle inline.
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt: public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStringCharCodeAt* instr_;
  };

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(this, instr);

  // Fast path; bails out to deferred->entry() on strings it cannot
  // decode inline.
  StringCharLoadGenerator::Generate(masm(),
                                    factory(),
                                    ToRegister(instr->string()),
                                    ToRegister(instr->index()),
                                    ToRegister(instr->result()),
                                    deferred->entry());
  __ bind(deferred->exit());
}
3557
3558
// Deferred (slow) path for DoStringCharCodeAt: calls the
// Runtime::kStringCharCodeAt function with (string, smi index) and
// untags the smi result into the result register.
void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, Immediate(0));

  PushSafepointRegistersScope scope(this);
  __ push(string);
  // Push the index as a smi. This is safe because of the checks in
  // DoStringCharCodeAt above.
  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
  if (instr->index()->IsConstantOperand()) {
    int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ push(Immediate(Smi::FromInt(const_index)));
  } else {
    Register index = ToRegister(instr->index());
    __ SmiTag(index);
    __ push(index);
  }
  CallRuntimeFromDeferred(Runtime::kStringCharCodeAt, 2,
                          instr, instr->context());
  if (FLAG_debug_code) {
    __ AbortIfNotSmi(eax);
  }
  // The runtime returns the char code as a smi in eax.
  __ SmiUntag(eax);
  __ StoreToSafepointRegisterSlot(result, eax);
}
3589
3590
// Converts a character code to a one-character string.  Fast path: look the
// code up in the single-character string cache; the deferred path (runtime
// call) handles codes above kMaxAsciiCharCode and cache misses.
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode: public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new DeferredStringCharFromCode(this, instr);

  ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  ASSERT(!char_code.is(result));

  // Codes outside the one-byte range always go to the deferred path.
  __ cmp(char_code, String::kMaxAsciiCharCode);
  __ j(above, deferred->entry());
  // Index the single-character string cache by the char code.
  __ Set(result, Immediate(factory()->single_character_string_cache()));
  __ mov(result, FieldOperand(result,
                              char_code, times_pointer_size,
                              FixedArray::kHeaderSize));
  // An undefined cache entry means a miss; fall back to the runtime.
  __ cmp(result, factory()->undefined_value());
  __ j(equal, deferred->entry());
  __ bind(deferred->exit());
}
3620
3621
// Deferred (slow) path for DoStringCharFromCode: calls
// Runtime::kCharFromCode with the smi-tagged char code and stores the
// resulting string into the result register's safepoint slot.
void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, Immediate(0));

  PushSafepointRegistersScope scope(this);
  __ SmiTag(char_code);
  __ push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
  __ StoreToSafepointRegisterSlot(result, eax);
}
3637
3638
Steve Block1e0659c2011-05-24 12:43:12 +01003639void LCodeGen::DoStringLength(LStringLength* instr) {
3640 Register string = ToRegister(instr->string());
3641 Register result = ToRegister(instr->result());
3642 __ mov(result, FieldOperand(string, String::kLengthOffset));
3643}
3644
3645
// String concatenation: pushes both operands and calls the StringAddStub.
// NO_STRING_CHECK_IN_STUB: the operands are already known to be strings,
// so the stub's receiver checks are skipped.
void LCodeGen::DoStringAdd(LStringAdd* instr) {
  EmitPushTaggedOperand(instr->left());
  EmitPushTaggedOperand(instr->right());
  StringAddStub stub(NO_STRING_CHECK_IN_STUB);
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
3652
3653
Ben Murdochb0fe1622011-05-05 13:52:32 +01003654void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003655 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003656 ASSERT(input->IsRegister() || input->IsStackSlot());
3657 LOperand* output = instr->result();
3658 ASSERT(output->IsDoubleRegister());
3659 __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
3660}
3661
3662
// Tags an int32 as a smi in place.  If the value does not fit in a smi the
// SmiTag shift overflows and the deferred path allocates a heap number.
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI: public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
  __ SmiTag(reg);
  // Overflow from the tagging shift means the value needs a heap number.
  __ j(overflow, deferred->entry());
  __ bind(deferred->exit());
}
3683
3684
// Deferred path for DoNumberTagI: the int32 did not fit in a smi, so box
// it in a freshly allocated heap number instead.
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
  Label slow;
  Register reg = ToRegister(instr->InputAt(0));
  Register tmp = reg.is(eax) ? ecx : eax;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this);

  // There was overflow, so bits 30 and 31 of the original integer
  // disagree. Try to allocate a heap number in new space and store
  // the value in there. If that fails, call the runtime system.
  Label done;
  // SmiUntag (sar 1) of the overflowed SmiTag (shl 1) yields the original
  // value with its sign bit flipped; the xor flips it back.
  __ SmiUntag(reg);
  __ xor_(reg, 0x80000000);
  __ cvtsi2sd(xmm0, Operand(reg));
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
    __ jmp(&done, Label::kNear);
  }

  // Slow case: Call the runtime system to do the number allocation.
  __ bind(&slow);

  // TODO(3095996): Put a valid pointer value in the stack slot where the result
  // register is stored, as this register is in the pointer map, but contains an
  // integer value.
  __ StoreToSafepointRegisterSlot(reg, Immediate(0));
  // NumberTagI and NumberTagD use the context from the frame, rather than
  // the environment's HContext or HInlinedContext value.
  // They only call Runtime::kAllocateHeapNumber.
  // The corresponding HChange instructions are added in a phase that does
  // not have easy access to the local context.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
  if (!reg.is(eax)) __ mov(reg, eax);

  // Done. Put the value in xmm0 into the value of the allocated heap
  // number.
  __ bind(&done);
  __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
  __ StoreToSafepointRegisterSlot(reg, reg);
}
3729
3730
// Boxes a double value in a heap number.  Allocation is inlined when
// FLAG_inline_new is set; otherwise (or when inline allocation fails) the
// deferred path calls the runtime allocator.
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD: public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LNumberTagD* instr_;
  };

  XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
  Register reg = ToRegister(instr->result());
  Register tmp = ToRegister(instr->TempAt(0));

  DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  // Store the double into the value field of the new heap number.
  __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
}
3755
3756
// Deferred path for DoNumberTagD: allocates a heap number via the runtime
// and places it in the result register's safepoint slot.
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  Register reg = ToRegister(instr->result());
  __ Set(reg, Immediate(0));

  PushSafepointRegistersScope scope(this);
  // NumberTagI and NumberTagD use the context from the frame, rather than
  // the environment's HContext or HInlinedContext value.
  // They only call Runtime::kAllocateHeapNumber.
  // The corresponding HChange instructions are added in a phase that does
  // not have easy access to the local context.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
  __ StoreToSafepointRegisterSlot(reg, eax);
}
3776
3777
3778void LCodeGen::DoSmiTag(LSmiTag* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003779 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003780 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3781 ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3782 __ SmiTag(ToRegister(input));
3783}
3784
3785
3786void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01003787 LOperand* input = instr->InputAt(0);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003788 ASSERT(input->IsRegister() && input->Equals(instr->result()));
3789 if (instr->needs_check()) {
3790 __ test(ToRegister(input), Immediate(kSmiTagMask));
3791 DeoptimizeIf(not_zero, instr->environment());
3792 }
3793 __ SmiUntag(ToRegister(input));
3794}
3795
3796
// Converts the tagged value in input_reg into a double in result_reg.
// Smis are untagged and converted directly; heap numbers have their value
// field loaded.  Any other object deoptimizes, except that when
// deoptimize_on_undefined is false, undefined converts to canonical NaN.
// When deoptimize_on_minus_zero is set, a heap-number -0.0 also
// deoptimizes (temp_reg is used as scratch for the sign-bit test).
// input_reg is preserved (it is re-tagged on the smi path).
void LCodeGen::EmitNumberUntagD(Register input_reg,
                                Register temp_reg,
                                XMMRegister result_reg,
                                bool deoptimize_on_undefined,
                                bool deoptimize_on_minus_zero,
                                LEnvironment* env) {
  Label load_smi, done;

  // Smi check.
  __ JumpIfSmi(input_reg, &load_smi, Label::kNear);

  // Heap number map check.
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  if (deoptimize_on_undefined) {
    DeoptimizeIf(not_equal, env);
  } else {
    Label heap_number;
    __ j(equal, &heap_number, Label::kNear);

    // Not a heap number: only undefined is acceptable here.
    __ cmp(input_reg, factory()->undefined_value());
    DeoptimizeIf(not_equal, env);

    // Convert undefined to NaN.
    ExternalReference nan =
        ExternalReference::address_of_canonical_non_hole_nan();
    __ movdbl(result_reg, Operand::StaticVariable(nan));
    __ jmp(&done, Label::kNear);

    __ bind(&heap_number);
  }
  // Heap number to XMM conversion.
  __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
  if (deoptimize_on_minus_zero) {
    // Compare against +0.0; if equal, inspect the sign bit via movmskpd
    // to distinguish -0.0 (bit 0 set) from +0.0.
    XMMRegister xmm_scratch = xmm0;
    __ xorps(xmm_scratch, xmm_scratch);
    __ ucomisd(result_reg, xmm_scratch);
    __ j(not_zero, &done, Label::kNear);
    __ movmskpd(temp_reg, result_reg);
    __ test_b(temp_reg, 1);
    DeoptimizeIf(not_zero, env);
  }
  __ jmp(&done, Label::kNear);

  // Smi to XMM conversion
  __ bind(&load_smi);
  __ SmiUntag(input_reg);  // Untag smi before converting to float.
  __ cvtsi2sd(result_reg, Operand(input_reg));
  __ SmiTag(input_reg);  // Retag smi.
  __ bind(&done);
}
3848
3849
// Deferred path for DoTaggedToI: the input was not a smi.  Converts a heap
// number (or, for truncating conversions, undefined) to an int32 in place,
// deoptimizing on anything that cannot be represented.
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
  Label done, heap_number;
  Register input_reg = ToRegister(instr->InputAt(0));

  // Heap number map check.
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());

  if (instr->truncating()) {
    __ j(equal, &heap_number, Label::kNear);
    // Check for undefined. Undefined is converted to zero for truncating
    // conversions.
    __ cmp(input_reg, factory()->undefined_value());
    DeoptimizeIf(not_equal, instr->environment());
    __ mov(input_reg, 0);
    __ jmp(&done, Label::kNear);

    __ bind(&heap_number);
    if (CpuFeatures::IsSupported(SSE3)) {
      CpuFeatures::Scope scope(SSE3);
      Label convert;
      // Use more powerful conversion when sse3 is available.
      // Load x87 register with heap number.
      __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
      // Get exponent alone and check for too-big exponent.
      __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
      __ and_(input_reg, HeapNumber::kExponentMask);
      const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
      __ j(less, &convert, Label::kNear);
      // Pop FPU stack before deoptimizing.
      __ fstp(0);
      DeoptimizeIf(no_condition, instr->environment());

      // Reserve space for 64 bit answer.
      __ bind(&convert);
      __ sub(Operand(esp), Immediate(kDoubleSize));
      // Do conversion, which cannot fail because we checked the exponent.
      __ fisttp_d(Operand(esp, 0));
      __ mov(input_reg, Operand(esp, 0));  // Low word of answer is the result.
      __ add(Operand(esp), Immediate(kDoubleSize));
    } else {
      XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
      __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
      __ cvttsd2si(input_reg, Operand(xmm0));
      // 0x80000000 is cvttsd2si's "invalid" indicator as well as a
      // legitimate result for kMinInt, so disambiguate below.
      __ cmp(input_reg, 0x80000000u);
      __ j(not_equal, &done);
      // Check if the input was 0x80000000 (kMinInt).
      // If no, then we got an overflow and we deoptimize.
      ExternalReference min_int = ExternalReference::address_of_min_int();
      __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
      __ ucomisd(xmm_temp, xmm0);
      DeoptimizeIf(not_equal, instr->environment());
      DeoptimizeIf(parity_even, instr->environment());  // NaN.
    }
  } else {
    // Deoptimize if we don't have a heap number.
    DeoptimizeIf(not_equal, instr->environment());

    // Non-truncating: round-trip int32 -> double and compare to make sure
    // the conversion was exact.
    XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
    __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2si(input_reg, Operand(xmm0));
    __ cvtsi2sd(xmm_temp, Operand(input_reg));
    __ ucomisd(xmm0, xmm_temp);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Result 0 could have come from -0.0; check the sign bit.
      __ test(input_reg, Operand(input_reg));
      __ j(not_zero, &done);
      __ movmskpd(input_reg, xmm0);
      __ and_(input_reg, 1);
      DeoptimizeIf(not_zero, instr->environment());
    }
  }
  __ bind(&done);
}
3927
3928
// Converts a tagged value to an int32 in place.  Smis are untagged inline;
// everything else goes through DoDeferredTaggedToI.
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  class DeferredTaggedToI: public LDeferredCode {
   public:
    DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LTaggedToI* instr_;
  };

  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  ASSERT(input->Equals(instr->result()));

  Register input_reg = ToRegister(input);

  DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);

  // Smi check.
  __ JumpIfNotSmi(input_reg, deferred->entry());

  // Smi to int32 conversion
  __ SmiUntag(input_reg);  // Untag smi.

  __ bind(deferred->exit());
}
3956
3957
// Converts a tagged number to a double register via EmitNumberUntagD.
// The temp register is only required (and only read) when the instruction
// must deoptimize on -0.0.
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  LOperand* temp = instr->TempAt(0);
  ASSERT(temp == NULL || temp->IsRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  XMMRegister result_reg = ToDoubleRegister(result);

  bool deoptimize_on_minus_zero =
      instr->hydrogen()->deoptimize_on_minus_zero();
  Register temp_reg = deoptimize_on_minus_zero ? ToRegister(temp) : no_reg;

  EmitNumberUntagD(input_reg,
                   temp_reg,
                   result_reg,
                   instr->hydrogen()->deoptimize_on_undefined(),
                   deoptimize_on_minus_zero,
                   instr->environment());
}
3980
3981
// Converts a double register to an int32.  Truncating conversions (JS
// bitwise-op semantics) handle out-of-range inputs via SSE3 fisttp or a
// manual bit-manipulation fallback; non-truncating conversions deoptimize
// whenever the conversion is not exact (including NaN and, optionally,
// -0.0).
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsDoubleRegister());
  LOperand* result = instr->result();
  ASSERT(result->IsRegister());

  XMMRegister input_reg = ToDoubleRegister(input);
  Register result_reg = ToRegister(result);

  if (instr->truncating()) {
    // Performs a truncating conversion of a floating point number as used by
    // the JS bitwise operations.
    // cvttsd2si produces 0x80000000 on overflow/NaN; that value triggers
    // the slow paths below.
    __ cvttsd2si(result_reg, Operand(input_reg));
    __ cmp(result_reg, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE3)) {
      // This will deoptimize if the exponent of the input is out of range.
      CpuFeatures::Scope scope(SSE3);
      Label convert, done;
      __ j(not_equal, &done, Label::kNear);
      __ sub(Operand(esp), Immediate(kDoubleSize));
      __ movdbl(Operand(esp, 0), input_reg);
      // Get exponent alone and check for too-big exponent.
      __ mov(result_reg, Operand(esp, sizeof(int32_t)));
      __ and_(result_reg, HeapNumber::kExponentMask);
      const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
      __ j(less, &convert, Label::kNear);
      __ add(Operand(esp), Immediate(kDoubleSize));
      DeoptimizeIf(no_condition, instr->environment());
      __ bind(&convert);
      // Do conversion, which cannot fail because we checked the exponent.
      __ fld_d(Operand(esp, 0));
      __ fisttp_d(Operand(esp, 0));
      __ mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
      __ add(Operand(esp), Immediate(kDoubleSize));
      __ bind(&done);
    } else {
      Label done;
      Register temp_reg = ToRegister(instr->TempAt(0));
      XMMRegister xmm_scratch = xmm0;

      // If cvttsd2si succeeded, we're done. Otherwise, we attempt
      // manual conversion.
      __ j(not_equal, &done, Label::kNear);

      // Get high 32 bits of the input in result_reg and temp_reg.
      __ pshufd(xmm_scratch, input_reg, 1);
      __ movd(Operand(temp_reg), xmm_scratch);
      __ mov(result_reg, temp_reg);

      // Prepare negation mask in temp_reg.
      __ sar(temp_reg, kBitsPerInt - 1);

      // Extract the exponent from result_reg and subtract adjusted
      // bias from it. The adjustment is selected in a way such that
      // when the difference is zero, the answer is in the low 32 bits
      // of the input, otherwise a shift has to be performed.
      __ shr(result_reg, HeapNumber::kExponentShift);
      __ and_(result_reg,
              HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
      __ sub(Operand(result_reg),
             Immediate(HeapNumber::kExponentBias +
                       HeapNumber::kExponentBits +
                       HeapNumber::kMantissaBits));
      // Don't handle big (> kMantissaBits + kExponentBits == 63) or
      // special exponents.
      DeoptimizeIf(greater, instr->environment());

      // Zero out the sign and the exponent in the input (by shifting
      // it to the left) and restore the implicit mantissa bit,
      // i.e. convert the input to unsigned int64 shifted left by
      // kExponentBits.
      ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
      // Minus zero has the most significant bit set and the other
      // bits cleared.
      __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
      __ psllq(input_reg, HeapNumber::kExponentBits);
      __ por(input_reg, xmm_scratch);

      // Get the amount to shift the input right in xmm_scratch.
      __ neg(result_reg);
      __ movd(xmm_scratch, Operand(result_reg));

      // Shift the input right and extract low 32 bits.
      __ psrlq(input_reg, xmm_scratch);
      __ movd(Operand(result_reg), input_reg);

      // Use the prepared mask in temp_reg to negate the result if necessary.
      __ xor_(result_reg, Operand(temp_reg));
      __ sub(result_reg, Operand(temp_reg));
      __ bind(&done);
    }
  } else {
    // Non-truncating: round-trip and compare to require an exact
    // conversion.
    Label done;
    __ cvttsd2si(result_reg, Operand(input_reg));
    __ cvtsi2sd(xmm0, Operand(result_reg));
    __ ucomisd(xmm0, input_reg);
    DeoptimizeIf(not_equal, instr->environment());
    DeoptimizeIf(parity_even, instr->environment());  // NaN.
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // The integer converted back is equal to the original. We
      // only have to test if we got -0 as an input.
      __ test(result_reg, Operand(result_reg));
      __ j(not_zero, &done, Label::kNear);
      __ movmskpd(result_reg, input_reg);
      // Bit 0 contains the sign of the double in input_reg.
      // If input was positive, we are ok and return 0, otherwise
      // deoptimize.
      __ and_(result_reg, 1);
      DeoptimizeIf(not_zero, instr->environment());
    }
    __ bind(&done);
  }
}
4097
4098
4099void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01004100 LOperand* input = instr->InputAt(0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004101 __ test(ToOperand(input), Immediate(kSmiTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01004102 DeoptimizeIf(not_zero, instr->environment());
4103}
4104
4105
4106void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
4107 LOperand* input = instr->InputAt(0);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004108 __ test(ToOperand(input), Immediate(kSmiTagMask));
Steve Block44f0eee2011-05-26 01:26:41 +01004109 DeoptimizeIf(zero, instr->environment());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004110}
4111
4112
// Deoptimizes unless the object's instance type satisfies the hydrogen
// check: either an [first, last] interval test or a mask-and-tag test on
// the instance type byte.
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->InputAt(0));
  Register temp = ToRegister(instr->TempAt(0));

  // Load the object's map to reach its instance type byte.
  __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
            static_cast<int8_t>(first));

    // If there is only one type in the interval check for equality.
    if (first == last) {
      DeoptimizeIf(not_equal, instr->environment());
    } else {
      DeoptimizeIf(below, instr->environment());
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
                static_cast<int8_t>(last));
        DeoptimizeIf(above, instr->environment());
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (IsPowerOf2(mask)) {
      // Single-bit mask: a test instruction suffices.
      ASSERT(tag == 0 || IsPowerOf2(tag));
      __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask);
      DeoptimizeIf(tag == 0 ? not_zero : zero, instr->environment());
    } else {
      // General case: mask the type byte and compare against the tag.
      __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
      __ and_(temp, mask);
      __ cmp(temp, tag);
      DeoptimizeIf(not_equal, instr->environment());
    }
  }
}
4156
4157
4158void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004159 Handle<JSFunction> target = instr->hydrogen()->target();
4160 if (isolate()->heap()->InNewSpace(*target)) {
4161 Register reg = ToRegister(instr->value());
4162 Handle<JSGlobalPropertyCell> cell =
4163 isolate()->factory()->NewJSGlobalPropertyCell(target);
4164 __ cmp(reg, Operand::Cell(cell));
4165 } else {
4166 Operand operand = ToOperand(instr->value());
4167 __ cmp(operand, target);
4168 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004169 DeoptimizeIf(not_equal, instr->environment());
4170}
4171
4172
// Shared helper: compares the object in |reg| against |map| (with the given
// CompareMapMode) and deoptimizes on mismatch. CompareMap may jump straight
// to |success| for acceptable map variants, hence the label dance.
void LCodeGen::DoCheckMapCommon(Register reg,
                                Handle<Map> map,
                                CompareMapMode mode,
                                LEnvironment* env) {
  Label success;
  __ CompareMap(reg, map, &success, mode);
  DeoptimizeIf(not_equal, env);
  __ bind(&success);
}
4182
4183
// Deoptimizes unless the input object's map matches the expected map.
void LCodeGen::DoCheckMap(LCheckMap* instr) {
  LOperand* input = instr->InputAt(0);
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  Handle<Map> map = instr->hydrogen()->map();
  DoCheckMapCommon(reg, map, instr->hydrogen()->mode(), instr->environment());
}
4191
4192
// Clamps a double value to the uint8 range [0, 255], using xmm0 as scratch.
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
  Register result_reg = ToRegister(instr->result());
  __ ClampDoubleToUint8(value_reg, xmm0, result_reg);
}
4198
4199
// Clamps an integer value to the uint8 range [0, 255] in place.
void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  // The instruction operates in place: unclamped and result share a register.
  ASSERT(instr->unclamped()->Equals(instr->result()));
  Register value_reg = ToRegister(instr->result());
  __ ClampUint8(value_reg);
}
4205
4206
// Clamps a tagged value to the uint8 range [0, 255] in place. Accepts smis,
// heap numbers, and undefined (which clamps to 0); deoptimizes on anything
// else.
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  ASSERT(instr->unclamped()->Equals(instr->result()));
  Register input_reg = ToRegister(instr->result());
  Label is_smi, done, heap_number;

  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  __ j(equal, &heap_number, Label::kNear);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, factory()->undefined_value());
  DeoptimizeIf(not_equal, instr->environment());
  __ mov(input_reg, 0);
  __ jmp(&done, Label::kNear);

  // Heap number
  __ bind(&heap_number);
  __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(xmm0, xmm1, input_reg);
  __ jmp(&done, Label::kNear);

  // smi
  __ bind(&is_smi);
  __ SmiUntag(input_reg);
  __ ClampUint8(input_reg);

  __ bind(&done);
}
4239
4240
// Walks the prototype chain from instr->prototype() up to (and including)
// instr->holder(), deoptimizing if any object on the chain has an unexpected
// map. Guards against prototype-chain mutation invalidating an inlined
// property access.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register reg = ToRegister(instr->TempAt(0));

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  __ LoadHeapObject(reg, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    // NOTE(review): ALLOW_ELEMENT_TRANSITION_MAPS presumably accepts maps
    // that differ only by an elements-kind transition — confirm against
    // MacroAssembler::CompareMap.
    DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
                     ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());

    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    __ LoadHeapObject(reg, current_prototype);
  }

  // Check the holder map.
  DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
                   ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
}
4265
4266
// Fast path for allocating a plain JSObject from a known constructor's
// initial map: bump-allocate in new space and initialize the header and
// in-object properties inline. Falls back to DoDeferredAllocateObject (a
// runtime call) when new-space allocation fails.
void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
  class DeferredAllocateObject: public LDeferredCode {
   public:
    DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LAllocateObject* instr_;
  };

  DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);

  Register result = ToRegister(instr->result());
  Register scratch = ToRegister(instr->TempAt(0));
  Handle<JSFunction> constructor = instr->hydrogen()->constructor();
  Handle<Map> initial_map(constructor->initial_map());
  int instance_size = initial_map->instance_size();
  // The fast path only handles maps whose property layout is fully settled
  // (no pending pre-allocated/unused property slack).
  ASSERT(initial_map->pre_allocated_property_fields() +
         initial_map->unused_property_fields() -
         initial_map->inobject_properties() == 0);

  // Allocate memory for the object.  The initial map might change when
  // the constructor's prototype changes, but instance size and property
  // counts remain unchanged (if slack tracking finished).
  ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
  __ AllocateInNewSpace(instance_size,
                        result,
                        no_reg,
                        scratch,
                        deferred->entry(),
                        TAG_OBJECT);

  // Load the initial map.
  // The map is reloaded from the constructor (rather than embedded) because
  // it may legitimately change; see the comment above.
  Register map = scratch;
  __ LoadHeapObject(scratch, constructor);
  __ mov(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));

  if (FLAG_debug_code) {
    // Cross-check the runtime map against the compile-time assumptions.
    __ AbortIfSmi(map);
    __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
            instance_size >> kPointerSizeLog2);
    __ Assert(equal, "Unexpected instance size");
    __ cmpb(FieldOperand(map, Map::kPreAllocatedPropertyFieldsOffset),
            initial_map->pre_allocated_property_fields());
    __ Assert(equal, "Unexpected pre-allocated property fields count");
    __ cmpb(FieldOperand(map, Map::kUnusedPropertyFieldsOffset),
            initial_map->unused_property_fields());
    __ Assert(equal, "Unexpected unused property fields count");
    __ cmpb(FieldOperand(map, Map::kInObjectPropertiesOffset),
            initial_map->inobject_properties());
    __ Assert(equal, "Unexpected in-object property fields count");
  }

  // Initialize map and fields of the newly allocated object.
  ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
  __ mov(FieldOperand(result, JSObject::kMapOffset), map);
  __ mov(scratch, factory()->empty_fixed_array());
  __ mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  __ mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  if (initial_map->inobject_properties() != 0) {
    // All in-object properties start out as undefined.
    __ mov(scratch, factory()->undefined_value());
    for (int i = 0; i < initial_map->inobject_properties(); i++) {
      int property_offset = JSObject::kHeaderSize + i * kPointerSize;
      __ mov(FieldOperand(result, property_offset), scratch);
    }
  }

  __ bind(deferred->exit());
}
4337
4338
// Slow path for DoAllocateObject: allocates the object via the
// Runtime::kNewObject runtime call and stores the result back into the
// safepoint register slot.
void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
  Register result = ToRegister(instr->result());
  Handle<JSFunction> constructor = instr->hydrogen()->constructor();

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Set(result, Immediate(0));

  PushSafepointRegistersScope scope(this);
  __ PushHeapObject(constructor);
  CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr, instr->context());
  // The runtime call returns in eax; forward it to the result's slot.
  __ StoreToSafepointRegisterSlot(result, eax);
}
4353
4354
// Materializes an array literal, choosing between the fast-clone stub and
// runtime calls based on literal depth, length, and elements kind. First
// deopts if the boilerplate's elements kind no longer matches what the
// optimized code was specialized for.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  Heap* heap = isolate()->heap();
  ElementsKind boilerplate_elements_kind =
      instr->hydrogen()->boilerplate_elements_kind();

  // Deopt if the array literal boilerplate ElementsKind is of a type different
  // than the expected one. The check isn't necessary if the boilerplate has
  // already been converted to FAST_ELEMENTS.
  if (boilerplate_elements_kind != FAST_ELEMENTS) {
    __ LoadHeapObject(eax, instr->hydrogen()->boilerplate_object());
    __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
    // Load the map's "bit field 2". We only need the first byte,
    // but the following masking takes care of that anyway.
    __ mov(ebx, FieldOperand(ebx, Map::kBitField2Offset));
    // Retrieve elements_kind from bit field 2.
    __ and_(ebx, Map::kElementsKindMask);
    __ cmp(ebx, boilerplate_elements_kind << Map::kElementsKindShift);
    DeoptimizeIf(not_equal, instr->environment());
  }

  // Set up the parameters to the stub/runtime call.
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  // Boilerplate already exists, constant elements are never accessed.
  // Pass an empty fixed array.
  __ push(Immediate(Handle<FixedArray>(heap->empty_fixed_array())));

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    // COW arrays share the boilerplate's elements; the stub only clones the
    // JSArray header.
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals require the full runtime path.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
4405
4406
// Emits code that deep-copies |object| (a literal boilerplate whose layout is
// known at compile time) from |source| into pre-allocated memory at
// |result| + |*offset|.  Recurses into nested JSObject values and copies
// elements backing stores; |*offset| is advanced past everything written so
// nested objects land contiguously after their parent.  ecx is used as a
// scratch register throughout, hence the register restrictions.
void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
                            Register result,
                            Register source,
                            int* offset) {
  ASSERT(!source.is(ecx));
  ASSERT(!result.is(ecx));

  if (FLAG_debug_code) {
    __ LoadHeapObject(ecx, object);
    __ cmp(source, ecx);
    __ Assert(equal, "Unexpected object literal boilerplate");
  }

  // Only elements backing stores for non-COW arrays need to be copied.
  Handle<FixedArrayBase> elements(object->elements());
  bool has_elements = elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map();

  // Increase the offset so that subsequent objects end up right after
  // this object and its backing store.
  int object_offset = *offset;
  int object_size = object->map()->instance_size();
  int elements_offset = *offset + object_size;
  int elements_size = has_elements ? elements->Size() : 0;
  *offset += object_size + elements_size;

  // Copy object header.
  ASSERT(object->properties()->length() == 0);
  int inobject_properties = object->map()->inobject_properties();
  int header_size = object_size - inobject_properties * kPointerSize;
  for (int i = 0; i < header_size; i += kPointerSize) {
    if (has_elements && i == JSObject::kElementsOffset) {
      // Point the elements field at the copy we emit below rather than at
      // the boilerplate's backing store.
      __ lea(ecx, Operand(result, elements_offset));
    } else {
      __ mov(ecx, FieldOperand(source, i));
    }
    __ mov(FieldOperand(result, object_offset + i), ecx);
  }

  // Copy in-object properties.
  for (int i = 0; i < inobject_properties; i++) {
    int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
    Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
    if (value->IsJSObject()) {
      // Nested object: store a pointer to where the copy will be, then
      // recurse to emit the copy itself.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      __ lea(ecx, Operand(result, *offset));
      __ mov(FieldOperand(result, total_offset), ecx);
      __ LoadHeapObject(source, value_object);
      EmitDeepCopy(value_object, result, source, offset);
    } else if (value->IsHeapObject()) {
      __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
      __ mov(FieldOperand(result, total_offset), ecx);
    } else {
      // Smis can be stored as immediates.
      __ mov(FieldOperand(result, total_offset), Immediate(value));
    }
  }

  if (has_elements) {
    // Copy elements backing store header.
    __ LoadHeapObject(source, elements);
    for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
      __ mov(ecx, FieldOperand(source, i));
      __ mov(FieldOperand(result, elements_offset + i), ecx);
    }

    // Copy elements backing store content.
    int elements_length = elements->length();
    if (elements->IsFixedDoubleArray()) {
      // Doubles are copied as two 32-bit immediates (raw bit pattern).
      Handle<FixedDoubleArray> double_array =
          Handle<FixedDoubleArray>::cast(elements);
      for (int i = 0; i < elements_length; i++) {
        int64_t value = double_array->get_representation(i);
        int32_t value_low = value & 0xFFFFFFFF;
        int32_t value_high = value >> 32;
        int total_offset =
            elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
        __ mov(FieldOperand(result, total_offset), Immediate(value_low));
        __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high));
      }
    } else if (elements->IsFixedArray()) {
      for (int i = 0; i < elements_length; i++) {
        int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
        Handle<Object> value = JSObject::GetElement(object, i);
        if (value->IsJSObject()) {
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          __ lea(ecx, Operand(result, *offset));
          __ mov(FieldOperand(result, total_offset), ecx);
          __ LoadHeapObject(source, value_object);
          EmitDeepCopy(value_object, result, source, offset);
        } else if (value->IsHeapObject()) {
          __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
          __ mov(FieldOperand(result, total_offset), ecx);
        } else {
          __ mov(FieldOperand(result, total_offset), Immediate(value));
        }
      }
    } else {
      UNREACHABLE();
    }
  }
}
4508
4509
// Materializes a literal by allocating one contiguous chunk for the whole
// object graph and deep-copying the boilerplate into it (see EmitDeepCopy).
// Falls back to a runtime allocation if new space is exhausted.
void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  int size = instr->hydrogen()->total_size();

  // Allocate all objects that are part of the literal in one big
  // allocation. This avoids multiple limit checks.
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(Immediate(Smi::FromInt(size)));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);

  __ bind(&allocated);
  int offset = 0;
  __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
  EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset);
  // The copy must fill the allocation exactly.
  ASSERT_EQ(size, offset);
}
4530
4531
// Materializes an object literal, choosing between the fast shallow-clone
// stub and runtime calls based on depth, elements kind, and property count.
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  Handle<FixedArray> literals(instr->environment()->closure()->literals());
  Handle<FixedArray> constant_properties =
      instr->hydrogen()->constant_properties();

  // Set up the parameters to the stub/runtime call.
  __ PushHeapObject(literals);
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(constant_properties));
  int flags = instr->hydrogen()->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= instr->hydrogen()->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ push(Immediate(Smi::FromInt(flags)));

  // Pick the right runtime function or stub to call.
  // constant_properties stores key/value pairs, hence the division by two.
  int properties_count = constant_properties->length() / 2;
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
4562
4563
// Converts the object in eax to fast-properties mode via a runtime call.
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  ASSERT(ToRegister(instr->InputAt(0)).is(eax));
  __ push(eax);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
4569
4570
// Materializes a regexp literal: lazily creates the JSRegExp the first time
// (via runtime), then produces a shallow copy of it on every evaluation,
// allocating in new space with a runtime fallback.
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  // esi = context.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  // An undefined literal slot means the regexp has not been created yet.
  __ cmp(ebx, factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->pattern()));
  __ push(Immediate(instr->hydrogen()->flags()));
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  // Preserve the boilerplate (ebx) across the runtime call.
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    // Odd word count: copy the final trailing word.
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
}
4623
4624
// Materializes a function literal (closure). Uses the fast stub for
// non-pretenured functions without literals; otherwise calls the runtime.
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  ASSERT(ToRegister(instr->context()).is(esi));
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  Handle<SharedFunctionInfo> shared_info = instr->shared_info();
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && shared_info->num_literals() == 0) {
    FastNewClosureStub stub(shared_info->language_mode());
    __ push(Immediate(shared_info));
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    // Runtime path: NewClosure(context, shared_info, pretenure_flag).
    __ push(esi);
    __ push(Immediate(shared_info));
    __ push(Immediate(pretenure
                      ? factory()->true_value()
                      : factory()->false_value()));
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
4644
4645
// Implements the generic typeof operator via a runtime call.
void LCodeGen::DoTypeof(LTypeof* instr) {
  // NOTE(review): the operand is at index 1 — index 0 presumably holds the
  // context; confirm against the LTypeof instruction definition.
  LOperand* input = instr->InputAt(1);
  EmitPushTaggedOperand(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}
4651
4652
// Branches on whether typeof(input) equals a literal type name, using
// EmitTypeofIs to emit the comparison.
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->InputAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition =
      EmitTypeofIs(true_label, false_label, input, instr->type_literal());
  // EmitTypeofIs returns no_condition when it has already jumped to the
  // false label itself (unknown type name); no final branch is needed then.
  if (final_branch_condition != no_condition) {
    EmitBranch(true_block, false_block, final_branch_condition);
  }
}
4666
4667
// Emits a typeof comparison for |input| against the literal |type_name|.
// May jump directly to |true_label|/|false_label| for cases it can decide
// early; otherwise returns the condition the caller should branch on.
// Returns no_condition for unrecognized type names (after jumping to
// false_label).  May clobber |input|.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  if (type_name->Equals(heap()->number_symbol())) {
    // Smis and heap numbers are both "number".
    __ JumpIfSmi(input, true_label);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           factory()->heap_number_map());
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->string_symbol())) {
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
    __ j(above_equal, false_label);
    // Undetectable strings are not "string".
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else if (type_name->Equals(heap()->boolean_symbol())) {
    __ cmp(input, factory()->true_value());
    __ j(equal, true_label);
    __ cmp(input, factory()->false_value());
    final_branch_condition = equal;

  } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
    // Under harmony typeof, null reports "null" instead of "object".
    __ cmp(input, factory()->null_value());
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->undefined_symbol())) {
    __ cmp(input, factory()->undefined_value());
    __ j(equal, true_label);
    __ JumpIfSmi(input, false_label);
    // Check for undetectable objects => true.
    __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = not_zero;

  } else if (type_name->Equals(heap()->function_symbol())) {
    // Both JSFunction and JSFunctionProxy report "function".
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfSmi(input, false_label);
    __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
    __ j(equal, true_label);
    __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
    final_branch_condition = equal;

  } else if (type_name->Equals(heap()->object_symbol())) {
    __ JumpIfSmi(input, false_label);
    if (!FLAG_harmony_typeof) {
      // Classic mode: null is "object".
      __ cmp(input, factory()->null_value());
      __ j(equal, true_label);
    }
    // Non-callable spec objects are "object"...
    __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
    __ j(below, false_label);
    __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, false_label);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else {
    // Unknown type name: typeof can never produce it.
    __ jmp(false_label);
  }
  return final_branch_condition;
}
4735
4736
// Branches on whether the current function was invoked as a constructor
// (i.e. via 'new').  EmitIsConstructCall sets the flags for an equal branch.
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp = ToRegister(instr->TempAt(0));
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());

  EmitIsConstructCall(temp);
  EmitBranch(true_block, false_block, equal);
}
4745
4746
// Compares the calling frame's marker against StackFrame::CONSTRUCT, leaving
// the flags set so the caller can branch on 'equal'.  Skips over an arguments
// adaptor frame if one is present.  Clobbers |temp|.
void LCodeGen::EmitIsConstructCall(Register temp) {
  // Get the frame pointer for the calling frame.
  __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker, Label::kNear);
  __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
}
4763
4764
// Pads the instruction stream with nops so that the deoptimizer can later
// patch a call at the previous lazy-deopt point without overwriting the
// instructions that follow it.
void LCodeGen::EnsureSpaceForLazyDeopt() {
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  int patch_size = Deoptimizer::patch_size();
  if (current_pc < last_lazy_deopt_pc_ + patch_size) {
    int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
    __ Nop(padding_size);
  }
  last_lazy_deopt_pc_ = masm()->pc_offset();
}
4776
4777
// Records a lazy deoptimization point: reserves patch space and registers
// the environment so the deoptimizer can reconstruct the unoptimized frame.
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
4785
4786
// Unconditionally deoptimizes (no_condition means "always").
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}
4790
4791
// Implements the delete operator by invoking the DELETE builtin with the
// object, key, and the current strict-mode flag.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  __ push(ToOperand(obj));
  EmitPushTaggedOperand(key);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  // Create safepoint generator that will also ensure enough space in the
  // reloc info for patching in deoptimization (since this is invoking a
  // builtin)
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  __ push(Immediate(Smi::FromInt(strict_mode_flag())));
  __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
4808
4809
// Out-of-line slow path for a stack check: calls Runtime::kStackGuard with
// all registers saved, and records a lazy-deopt safepoint for the call.
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  // Save/restore all registers around the runtime call.
  PushSafepointRegistersScope scope(this);
  // Reload the context from the frame before calling into the runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // Tie this safepoint to the environment's deoptimization index.
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
4820
4821
// Emits a stack-overflow check, either at function entry (calls the
// StackCheckStub inline) or at a backwards branch (jumps to deferred code
// that calls the stack guard runtime function).
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Deferred code object that routes to DoDeferredStackCheck above.
  class DeferredStackCheck: public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
    virtual LInstruction* instr() { return instr_; }
   private:
    LStackCheck* instr_;
  };

  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    // Stack pointer above the limit: no overflow, skip the stub call.
    __ j(above_equal, &done, Label::kNear);

    ASSERT(instr->context()->IsRegister());
    ASSERT(ToRegister(instr->context()).is(esi));
    StackCheckStub stub;
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
    // The stub call site must be patchable for lazy deoptimization.
    EnsureSpaceForLazyDeopt();
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new DeferredStackCheck(this, instr);
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    // Below the limit: take the deferred slow path to call the stack guard.
    __ j(below, deferred_stack_check->entry());
    EnsureSpaceForLazyDeopt();
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting call and the safepoint in
    // the deferred code.
  }
}
4871
4872
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();
  environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
                                   instr->SpilledDoubleRegisterArray());

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  ASSERT(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  // The OSR entry point must only be recorded once.
  ASSERT(osr_pc_offset_ == -1);
  osr_pc_offset_ = masm()->pc_offset();
}
4888
4889
// Implements the JavaScript `in` operator by pushing key and object and
// invoking the IN builtin.
void LCodeGen::DoIn(LIn* instr) {
  LOperand* obj = instr->object();
  LOperand* key = instr->key();
  // Builtin calling convention: key pushed first, then the object.
  EmitPushTaggedOperand(key);
  EmitPushTaggedOperand(obj);
  ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
  LPointerMap* pointers = instr->pointer_map();
  RecordPosition(pointers->position());
  // The safepoint generator also reserves patchable space for lazy deopt
  // at the builtin call site.
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
4902
4903
// Prepares a for-in loop: validates the enumerable in eax (deopting on
// undefined, null, smis, and proxies) and loads either the cached enum map
// or the runtime-computed property names into eax.
void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
  // Deopt if the enumerable is undefined or null — for-in over these is
  // handled elsewhere.
  __ cmp(eax, isolate()->factory()->undefined_value());
  DeoptimizeIf(equal, instr->environment());

  __ cmp(eax, isolate()->factory()->null_value());
  DeoptimizeIf(equal, instr->environment());

  // Deopt on smis: they are not heap objects and have no map.
  __ test(eax, Immediate(kSmiTagMask));
  DeoptimizeIf(zero, instr->environment());

  // Deopt on JS proxies; the range check below relies on proxies being the
  // first spec-object types.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  DeoptimizeIf(below_equal, instr->environment());

  Label use_cache, call_runtime;
  // Fast path: the object's whole prototype chain has a valid enum cache.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);

  // Deopt unless the runtime returned a map (its map is the meta map),
  // i.e. the enum cache is usable.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  DeoptimizeIf(not_equal, instr->environment());
  __ bind(&use_cache);
}
4934
4935
// Loads the enum cache array for a for-in loop from the given map's
// instance descriptors; deopts if the cache entry is empty (zero).
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
  Register map = ToRegister(instr->map());
  Register result = ToRegister(instr->result());
  __ LoadInstanceDescriptors(map, result);
  __ mov(result,
         FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
  // Select the cache entry for this instruction's index.
  __ mov(result,
         FieldOperand(result, FixedArray::SizeFor(instr->idx())));
  // A zero entry means no cache — bail out to the deoptimizer.
  __ test(result, result);
  DeoptimizeIf(equal, instr->environment());
}
4947
4948
// Deopts unless the object's map equals the expected map register — used
// to guard that an object's shape has not changed mid for-in.
void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
  Register object = ToRegister(instr->value());
  __ cmp(ToRegister(instr->map()),
         FieldOperand(object, HeapObject::kMapOffset));
  DeoptimizeIf(not_equal, instr->environment());
}
4955
4956
// Loads a property by its field index: non-negative indices address
// in-object fields, negative indices address the out-of-object properties
// backing store.  The result overwrites the object register.
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());

  Label out_of_object, done;
  __ cmp(index, Immediate(0));
  __ j(less, &out_of_object);
  // In-object case: field lives directly in the object after the header.
  // NOTE(review): times_half_pointer_size scaling suggests the index is
  // smi-tagged (tag occupies the low bit) — confirm against the caller.
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              JSObject::kHeaderSize));
  __ jmp(&done, Label::kNear);

  __ bind(&out_of_object);
  // Out-of-object case: load the properties array, then index it with the
  // negated index.
  __ mov(object, FieldOperand(object, JSObject::kPropertiesOffset));
  __ neg(index);
  // Index is now equal to out of object property index plus 1.
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              FixedArray::kHeaderSize - kPointerSize));
  __ bind(&done);
}
4980
4981
Ben Murdochb0fe1622011-05-05 13:52:32 +01004982#undef __
4983
4984} } // namespace v8::internal
Ben Murdochb8e0da22011-05-16 14:20:40 +01004985
4986#endif // V8_TARGET_ARCH_IA32