// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/base/bits.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/hydrogen-osr.h"
#include "src/ia32/lithium-codegen-ia32.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

// When invoking builtins, we need to record the safepoint in the middle of
// the invoke instruction sequence generated by the macro assembler.
class SafepointGenerator FINAL : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen),
        pointers_(pointers),
        deopt_mode_(mode) {}
  virtual ~SafepointGenerator() {}

  void BeforeCall(int call_size) const OVERRIDE {}

  void AfterCall() const OVERRIDE {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;
};


#define __ masm()->

bool LCodeGen::GenerateCode() {
  LPhase phase("Z_Code generation", chunk());
  DCHECK(is_unused());
  status_ = GENERATING;

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  support_aligned_spilled_doubles_ = info()->IsOptimizing();

  dynamic_frame_alignment_ = info()->IsOptimizing() &&
      ((chunk()->num_double_slots() > 2 &&
        !chunk()->graph()->is_recursive()) ||
       !info()->osr_ast_id().IsNone());

  return GeneratePrologue() &&
         GenerateBody() &&
         GenerateDeferredCode() &&
         GenerateJumpTable() &&
         GenerateSafepointTable();
}


void LCodeGen::FinishCode(Handle<Code> code) {
  DCHECK(is_done());
  code->set_stack_slots(GetStackSlotCount());
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
  PopulateDeoptimizationData(code);
  if (!info()->IsStub()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
  }
}

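// On Windows the OS commits stack memory one guard page at a time, so after
// reserving a large frame the code must touch each 4 KB page of the new area
// in order (from the old stack pointer downward); skipping pages would fault
// instead of growing the stack. MakeSureStackPagesMapped does exactly that.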
#ifdef _MSC_VER
void LCodeGen::MakeSureStackPagesMapped(int offset) {
  const int kPageSize = 4 * KB;
  for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
    __ mov(Operand(esp, offset), eax);
  }
}
#endif


void LCodeGen::SaveCallerDoubles() {
  DCHECK(info()->saves_caller_doubles());
  DCHECK(NeedsEagerFrame());
  Comment(";;; Save clobbered callee double registers");
  int count = 0;
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  while (!save_iterator.Done()) {
    __ movsd(MemOperand(esp, count * kDoubleSize),
             XMMRegister::FromAllocationIndex(save_iterator.Current()));
    save_iterator.Advance();
    count++;
  }
}


void LCodeGen::RestoreCallerDoubles() {
  DCHECK(info()->saves_caller_doubles());
  DCHECK(NeedsEagerFrame());
  Comment(";;; Restore clobbered callee double registers");
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  int count = 0;
  while (!save_iterator.Done()) {
    __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
             MemOperand(esp, count * kDoubleSize));
    save_iterator.Advance();
    count++;
  }
}


bool LCodeGen::GeneratePrologue() {
  DCHECK(is_generating());

  if (info()->IsOptimizing()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
      __ int3();
    }
#endif

    // Sloppy mode functions and builtins need to replace the receiver with the
    // global proxy when called as functions (without an explicit receiver
    // object).
    if (info_->this_has_uses() &&
        info_->strict_mode() == SLOPPY &&
        !info_->is_native()) {
      Label ok;
      // +1 for return address.
      int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
      __ mov(ecx, Operand(esp, receiver_offset));

      __ cmp(ecx, isolate()->factory()->undefined_value());
      __ j(not_equal, &ok, Label::kNear);

      __ mov(ecx, GlobalObjectOperand());
      __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));

      __ mov(Operand(esp, receiver_offset), ecx);

      __ bind(&ok);
    }

    if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) {
      // Move state of dynamic frame alignment into edx.
      __ Move(edx, Immediate(kNoAlignmentPadding));

      Label do_not_pad, align_loop;
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      // Align esp + 4 to a multiple of 2 * kPointerSize.
      __ test(esp, Immediate(kPointerSize));
      __ j(not_zero, &do_not_pad, Label::kNear);
      __ push(Immediate(0));
      __ mov(ebx, esp);
      __ mov(edx, Immediate(kAlignmentPaddingPushed));
      // Copy arguments, receiver, and return address.
      __ mov(ecx, Immediate(scope()->num_parameters() + 2));

      __ bind(&align_loop);
      __ mov(eax, Operand(ebx, 1 * kPointerSize));
      __ mov(Operand(ebx, 0), eax);
      __ add(Operand(ebx), Immediate(kPointerSize));
      __ dec(ecx);
      __ j(not_zero, &align_loop, Label::kNear);
      __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
      __ bind(&do_not_pad);
    }
  }

  info()->set_prologue_offset(masm_->pc_offset());
  if (NeedsEagerFrame()) {
    DCHECK(!frame_is_built_);
    frame_is_built_ = true;
    if (info()->IsStub()) {
      __ StubPrologue();
    } else {
      __ Prologue(info()->IsCodePreAgingActive());
    }
    info()->AddNoFrameRange(0, masm_->pc_offset());
  }

  if (info()->IsOptimizing() &&
      dynamic_frame_alignment_ &&
      FLAG_debug_code) {
    __ test(esp, Immediate(kPointerSize));
    __ Assert(zero, kFrameIsExpectedToBeAligned);
  }

  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  DCHECK(slots != 0 || !info()->IsOptimizing());
  if (slots > 0) {
    if (slots == 1) {
      if (dynamic_frame_alignment_) {
        __ push(edx);
      } else {
        __ push(Immediate(kNoAlignmentPadding));
      }
    } else {
      if (FLAG_debug_code) {
        __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
        MakeSureStackPagesMapped(slots * kPointerSize);
#endif
        __ push(eax);
        __ mov(Operand(eax), Immediate(slots));
        Label loop;
        __ bind(&loop);
        __ mov(MemOperand(esp, eax, times_4, 0),
               Immediate(kSlotsZapValue));
        __ dec(eax);
        __ j(not_zero, &loop);
        __ pop(eax);
      } else {
        __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
        MakeSureStackPagesMapped(slots * kPointerSize);
#endif
      }

      if (support_aligned_spilled_doubles_) {
        Comment(";;; Store dynamic frame alignment tag for spilled doubles");
        // Store dynamic frame alignment state in the first local.
        int offset = JavaScriptFrameConstants::kDynamicAlignmentStateOffset;
        if (dynamic_frame_alignment_) {
          __ mov(Operand(ebp, offset), edx);
        } else {
          __ mov(Operand(ebp, offset), Immediate(kNoAlignmentPadding));
        }
      }
    }

    if (info()->saves_caller_doubles()) SaveCallerDoubles();
  }

  // Possibly allocate a local context.
  int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in edi.
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(edi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    RecordSafepoint(Safepoint::kNoLazyDeopt);
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace && info()->IsOptimizing()) {
    // We have not executed any compiled code yet, so esi still holds the
    // incoming context.
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}


void LCodeGen::GenerateOsrPrologue() {
  // Generate the OSR entry prologue at the first unknown OSR value, or if there
  // are none, at the OSR entrypoint instruction.
  if (osr_pc_offset_ >= 0) return;

  osr_pc_offset_ = masm()->pc_offset();

  // Move state of dynamic frame alignment into edx.
  __ Move(edx, Immediate(kNoAlignmentPadding));

  if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) {
    Label do_not_pad, align_loop;
    // Align ebp + 4 to a multiple of 2 * kPointerSize.
    __ test(ebp, Immediate(kPointerSize));
    __ j(zero, &do_not_pad, Label::kNear);
    __ push(Immediate(0));
    __ mov(ebx, esp);
    __ mov(edx, Immediate(kAlignmentPaddingPushed));

    // Move all parts of the frame over one word. The frame consists of:
    // unoptimized frame slots, alignment state, context, frame pointer, return
    // address, receiver, and the arguments.
    __ mov(ecx, Immediate(scope()->num_parameters() +
                          5 + graph()->osr()->UnoptimizedFrameSlots()));

    __ bind(&align_loop);
    __ mov(eax, Operand(ebx, 1 * kPointerSize));
    __ mov(Operand(ebx, 0), eax);
    __ add(Operand(ebx), Immediate(kPointerSize));
    __ dec(ecx);
    __ j(not_zero, &align_loop, Label::kNear);
    __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
    __ sub(Operand(ebp), Immediate(kPointerSize));
    __ bind(&do_not_pad);
  }

  // Save the first local, which is overwritten by the alignment state.
  Operand alignment_loc = MemOperand(ebp, -3 * kPointerSize);
  __ push(alignment_loc);

  // Set the dynamic frame alignment state.
  __ mov(alignment_loc, edx);

  // Adjust the frame size, subsuming the unoptimized frame into the
  // optimized frame.
  int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots();
  DCHECK(slots >= 1);
  __ sub(esp, Immediate((slots - 1) * kPointerSize));
}


void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }
}


void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) { }

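// Emits the out-of-line deoptimization jump table filled in by DeoptimizeIf.
// Entries that still need a frame (only possible for stubs) share a single
// needs_frame sequence that builds a STUB marker frame before transferring
// control to the deoptimizer entry; all other entries call the entry directly.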
380bool LCodeGen::GenerateJumpTable() {
381 Label needs_frame;
382 if (jump_table_.length() > 0) {
383 Comment(";;; -------------------- Jump table --------------------");
384 }
385 for (int i = 0; i < jump_table_.length(); i++) {
386 Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
387 __ bind(&table_entry->label);
388 Address entry = table_entry->address;
389 DeoptComment(table_entry->reason);
390 if (table_entry->needs_frame) {
391 DCHECK(!info()->saves_caller_doubles());
392 __ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
393 if (needs_frame.is_bound()) {
394 __ jmp(&needs_frame);
395 } else {
396 __ bind(&needs_frame);
397 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset));
398 // This variant of deopt can only be used with stubs. Since we don't
399 // have a function pointer to install in the stack frame that we're
400 // building, install a special marker there instead.
401 DCHECK(info()->IsStub());
402 __ push(Immediate(Smi::FromInt(StackFrame::STUB)));
403 // Push a PC inside the function so that the deopt code can find where
404 // the deopt comes from. It doesn't have to be the precise return
405 // address of a "calling" LAZY deopt, it only has to be somewhere
406 // inside the code body.
407 Label push_approx_pc;
408 __ call(&push_approx_pc);
409 __ bind(&push_approx_pc);
        // Push the continuation which was stashed where the ebp should
        // be. Replace it with the saved ebp.
        __ push(MemOperand(esp, 3 * kPointerSize));
        __ mov(MemOperand(esp, 4 * kPointerSize), ebp);
        __ lea(ebp, MemOperand(esp, 4 * kPointerSize));
        __ ret(0);  // Call the continuation without clobbering registers.
      }
    } else {
      if (info()->saves_caller_doubles()) RestoreCallerDoubles();
      __ call(entry, RelocInfo::RUNTIME_ENTRY);
    }
  }
  return !is_aborted();
}


bool LCodeGen::GenerateDeferredCode() {
  DCHECK(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];

      HValue* value =
          instructions_->at(code->instruction_index())->hydrogen_value();
      RecordAndWritePosition(
          chunk()->graph()->SourcePositionToScriptPosition(value->position()));

      Comment(";;; <@%d,#%d> "
              "-------------------- Deferred %s --------------------",
              code->instruction_index(),
              code->instr()->hydrogen_value()->id(),
              code->instr()->Mnemonic());
      __ bind(code->entry());
      if (NeedsDeferredFrame()) {
        Comment(";;; Build frame");
        DCHECK(!frame_is_built_);
        DCHECK(info()->IsStub());
        frame_is_built_ = true;
        // Build the frame in such a way that esi isn't trashed.
        __ push(ebp);  // Caller's frame pointer.
        __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
        __ push(Immediate(Smi::FromInt(StackFrame::STUB)));
        __ lea(ebp, Operand(esp, 2 * kPointerSize));
        Comment(";;; Deferred code");
      }
      code->Generate();
      if (NeedsDeferredFrame()) {
        __ bind(code->done());
        Comment(";;; Destroy frame");
        DCHECK(frame_is_built_);
        frame_is_built_ = false;
        __ mov(esp, ebp);
        __ pop(ebp);
      }
      __ jmp(code->exit());
    }
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  DCHECK(is_done());
  if (!info()->IsStub()) {
    // For lazy deoptimization we need space to patch a call after every call.
    // Ensure there is always space for such patching, even if the code ends
    // in a call.
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}


Register LCodeGen::ToRegister(int index) const {
  return Register::FromAllocationIndex(index);
}


XMMRegister LCodeGen::ToDoubleRegister(int index) const {
  return XMMRegister::FromAllocationIndex(index);
}


Register LCodeGen::ToRegister(LOperand* op) const {
  DCHECK(op->IsRegister());
  return ToRegister(op->index());
}


XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  DCHECK(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


int32_t LCodeGen::ToInteger32(LConstantOperand* op) const {
  return ToRepresentation(op, Representation::Integer32());
}


int32_t LCodeGen::ToRepresentation(LConstantOperand* op,
                                   const Representation& r) const {
  HConstant* constant = chunk_->LookupConstant(op);
  int32_t value = constant->Integer32Value();
  if (r.IsInteger32()) return value;
  DCHECK(r.IsSmiOrTagged());
  return reinterpret_cast<int32_t>(Smi::FromInt(value));
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
  return constant->handle(isolate());
}


double LCodeGen::ToDouble(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(constant->HasDoubleValue());
  return constant->DoubleValue();
}


ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(constant->HasExternalReferenceValue());
  return constant->ExternalReferenceValue();
}


bool LCodeGen::IsInteger32(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
}


bool LCodeGen::IsSmi(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmi();
}

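// Maps a spill slot index onto an esp-relative offset for code that runs
// without an eager frame. Indices are negative and refer to the incoming
// arguments, which sit just above the return address on the stack.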
static int ArgumentsOffsetWithoutFrame(int index) {
  DCHECK(index < 0);
  return -(index + 1) * kPointerSize + kPCOnStackSize;
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  if (op->IsRegister()) return Operand(ToRegister(op));
  if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
  DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
  if (NeedsEagerFrame()) {
    return Operand(ebp, StackSlotOffset(op->index()));
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return Operand(esp, ArgumentsOffsetWithoutFrame(op->index()));
  }
}


Operand LCodeGen::HighOperand(LOperand* op) {
  DCHECK(op->IsDoubleStackSlot());
  if (NeedsEagerFrame()) {
    return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize);
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return Operand(
        esp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
  }
}

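// Emits the deoptimization translation for an environment and, recursively,
// for its outer environments: a frame-begin command per frame followed by one
// command per environment value, which the deoptimizer later uses to
// materialize the equivalent unoptimized frames.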
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->translation_size();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  bool has_closure_id = !info()->closure().is_null() &&
      !info()->closure().is_identical_to(environment->closure());
  int closure_id = has_closure_id
      ? DefineDeoptimizationLiteral(environment->closure())
      : Translation::kSelfLiteralId;
  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case JS_GETTER:
      DCHECK(translation_size == 1);
      DCHECK(height == 0);
      translation->BeginGetterStubFrame(closure_id);
      break;
    case JS_SETTER:
      DCHECK(translation_size == 2);
      DCHECK(height == 0);
      translation->BeginSetterStubFrame(closure_id);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
    case STUB:
      translation->BeginCompiledStubFrame();
      break;
    default:
      UNREACHABLE();
  }

  int object_index = 0;
  int dematerialized_index = 0;
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    AddToTranslation(environment,
                     translation,
                     value,
                     environment->HasTaggedValueAt(i),
                     environment->HasUint32ValueAt(i),
                     &object_index,
                     &dematerialized_index);
  }
}


void LCodeGen::AddToTranslation(LEnvironment* environment,
                                Translation* translation,
                                LOperand* op,
                                bool is_tagged,
                                bool is_uint32,
                                int* object_index_pointer,
                                int* dematerialized_index_pointer) {
  if (op == LEnvironment::materialization_marker()) {
    int object_index = (*object_index_pointer)++;
    if (environment->ObjectIsDuplicateAt(object_index)) {
      int dupe_of = environment->ObjectDuplicateOfAt(object_index);
      translation->DuplicateObject(dupe_of);
      return;
    }
    int object_length = environment->ObjectLengthAt(object_index);
    if (environment->ObjectIsArgumentsAt(object_index)) {
      translation->BeginArgumentsObject(object_length);
    } else {
      translation->BeginCapturedObject(object_length);
    }
    int dematerialized_index = *dematerialized_index_pointer;
    int env_offset = environment->translation_size() + dematerialized_index;
    *dematerialized_index_pointer += object_length;
    for (int i = 0; i < object_length; ++i) {
      LOperand* value = environment->values()->at(env_offset + i);
      AddToTranslation(environment,
                       translation,
                       value,
                       environment->HasTaggedValueAt(env_offset + i),
                       environment->HasUint32ValueAt(env_offset + i),
                       object_index_pointer,
                       dematerialized_index_pointer);
    }
    return;
  }

  if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else if (is_uint32) {
      translation->StoreUint32StackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else if (is_uint32) {
      translation->StoreUint32Register(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(constant->handle(isolate()));
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode) {
  DCHECK(instr != NULL);
  __ call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallCode(Handle<Code> code,
                        RelocInfo::Mode mode,
                        LInstruction* instr) {
  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
}


void LCodeGen::CallRuntime(const Runtime::Function* fun,
                           int argc,
                           LInstruction* instr,
                           SaveFPRegsMode save_doubles) {
  DCHECK(instr != NULL);
  DCHECK(instr->HasPointerMap());

  __ CallRuntime(fun, argc, save_doubles);

  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);

  DCHECK(info()->is_calling());
}


void LCodeGen::LoadContextFromDeferred(LOperand* context) {
  if (context->IsRegister()) {
    if (!ToRegister(context).is(esi)) {
      __ mov(esi, ToRegister(context));
    }
  } else if (context->IsStackSlot()) {
    __ mov(esi, ToOperand(context));
  } else if (context->IsConstantOperand()) {
    HConstant* constant =
        chunk_->LookupConstant(LConstantOperand::cast(context));
    __ LoadObject(esi, Handle<Object>::cast(constant->handle(isolate())));
  } else {
    UNREACHABLE();
  }
}

void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr,
                                       LOperand* context) {
  LoadContextFromDeferred(context);

  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);

  DCHECK(info()->is_calling());
}


void LCodeGen::RegisterEnvironmentForDeoptimization(
    LEnvironment* environment, Safepoint::DeoptMode mode) {
  environment->set_has_been_used();
  if (!environment->HasBeenRegistered()) {
    // Physical stack frame layout:
    // -x ............. -4  0 ..................................... y
    // [incoming arguments] [spill slots] [pushed outgoing arguments]

    // Layout of the environment:
    // 0 ..................................................... size-1
    // [parameters] [locals] [expression stack including arguments]

    // Layout of the translation:
    // 0 ........................................................ size - 1 + 4
    // [expression stack including arguments] [locals] [4 words] [parameters]
    // |>------------  translation_size ------------<|

    int frame_count = 0;
    int jsframe_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
      if (e->frame_type() == JS_FUNCTION) {
        ++jsframe_count;
      }
    }
    Translation translation(&translations_, frame_count, jsframe_count, zone());
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    environment->Register(deoptimization_index,
                          translation.index(),
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment, zone());
  }
}

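// Registers the instruction's environment with the deoptimizer and emits the
// bailout itself: an unconditional call to the deoptimization entry when a
// frame is already built, otherwise a (conditional) jump to an entry in the
// jump table emitted by GenerateJumpTable.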
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                            const char* detail,
                            Deoptimizer::BailoutType bailout_type) {
  LEnvironment* environment = instr->environment();
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  DCHECK(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  DCHECK(info()->IsOptimizing() || info()->IsStub());
  Address entry =
      Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
  if (entry == NULL) {
    Abort(kBailoutWasNotPrepared);
    return;
  }

  if (DeoptEveryNTimes()) {
    ExternalReference count = ExternalReference::stress_deopt_count(isolate());
    Label no_deopt;
    __ pushfd();
    __ push(eax);
    __ mov(eax, Operand::StaticVariable(count));
    __ sub(eax, Immediate(1));
    __ j(not_zero, &no_deopt, Label::kNear);
    if (FLAG_trap_on_deopt) __ int3();
    __ mov(eax, Immediate(FLAG_deopt_every_n_times));
    __ mov(Operand::StaticVariable(count), eax);
    __ pop(eax);
    __ popfd();
    DCHECK(frame_is_built_);
    __ call(entry, RelocInfo::RUNTIME_ENTRY);
    __ bind(&no_deopt);
    __ mov(Operand::StaticVariable(count), eax);
    __ pop(eax);
    __ popfd();
  }

  if (info()->ShouldTrapOnDeopt()) {
    Label done;
    if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear);
    __ int3();
    __ bind(&done);
  }

  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
                             instr->Mnemonic(), detail);
  DCHECK(info()->IsStub() || frame_is_built_);
  if (cc == no_condition && frame_is_built_) {
    DeoptComment(reason);
    __ call(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
                                            !frame_is_built_);
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (jump_table_.is_empty() ||
        !table_entry.IsEquivalentTo(jump_table_.last())) {
      jump_table_.Add(table_entry, zone());
    }
    if (cc == no_condition) {
      __ jmp(&jump_table_.last().label);
    } else {
      __ j(cc, &jump_table_.last().label);
    }
  }
}


void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                            const char* detail) {
  Deoptimizer::BailoutType bailout_type = info()->IsStub()
      ? Deoptimizer::LAZY
      : Deoptimizer::EAGER;
  DeoptimizeIf(cc, instr, detail, bailout_type);
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), length, TENURED);

  Handle<ByteArray> translations =
      translations_.CreateByteArray(isolate()->factory());
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
  data->SetOptimizationId(Smi::FromInt(info_->optimization_id()));
  if (info_->IsOptimizing()) {
    // Reference to shared function info does not change between phases.
    AllowDeferredHandleDereference allow_handle_dereference;
    data->SetSharedFunctionInfo(*info_->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals =
      factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
  { AllowDeferredHandleDereference copy_handles;
    for (int i = 0; i < deoptimization_literals_.length(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, env->ast_id());
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}


int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = deoptimization_literals_.length();
  for (int i = 0; i < deoptimization_literals_.length(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.Add(literal, zone());
  return result;
}


void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  DCHECK(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    DCHECK(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kLazyDeopt);
  }
}


void LCodeGen::RecordSafepoint(
    LPointerMap* pointers,
    Safepoint::Kind kind,
    int arguments,
    Safepoint::DeoptMode deopt_mode) {
  DCHECK(kind == expected_safepoint_kind_);
  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
  Safepoint safepoint =
      safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer), zone());
    }
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, mode);
}


void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
  LPointerMap empty_pointers(zone());
  RecordSafepoint(&empty_pointers, mode);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            Safepoint::DeoptMode mode) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, mode);
}


void LCodeGen::RecordAndWritePosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
  masm()->positions_recorder()->WriteRecordedPositions();
}


static const char* LabelType(LLabel* label) {
  if (label->is_loop_header()) return " (loop header)";
  if (label->is_osr_entry()) return " (OSR entry)";
  return "";
}


void LCodeGen::DoLabel(LLabel* label) {
  Comment(";;; <@%d,#%d> -------------------- B%d%s --------------------",
          current_instruction_,
          label->hydrogen_value()->id(),
          label->block_id(),
          LabelType(label));
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}


void LCodeGen::DoParallelMove(LParallelMove* move) {
  resolver_.Resolve(move);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }
}


void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  DCHECK(ToRegister(instr->result()).is(eax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpExec: {
      RegExpExecStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  GenerateOsrPrologue();
}

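// Computes dividend % divisor when the divisor is +/- a power of two by
// masking with |divisor| - 1. Negative dividends are negated, masked and
// negated back so the result keeps the sign of the dividend, as required by
// truncating (C-style) modulus semantics.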
void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(dividend.is(ToRegister(instr->result())));

  // Theoretically, a variation of the branch-free code for integer division by
  // a power of 2 (calculating the remainder via an additional multiplication
  // (which gets simplified to an 'and') and subtraction) should be faster, and
  // this is exactly what GCC and clang emit. Nevertheless, benchmarks seem to
  // indicate that positive dividends are heavily favored, so the branching
  // version performs better.
  HMod* hmod = instr->hydrogen();
  int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
  Label dividend_is_not_negative, done;
  if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) {
    __ test(dividend, dividend);
    __ j(not_sign, &dividend_is_not_negative, Label::kNear);
    // Note that this is correct even for kMinInt operands.
    __ neg(dividend);
    __ and_(dividend, mask);
    __ neg(dividend);
    if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(zero, instr, "minus zero");
    }
    __ jmp(&done, Label::kNear);
  }

  __ bind(&dividend_is_not_negative);
  __ and_(dividend, mask);
  __ bind(&done);
}

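// Computes dividend % divisor for a constant divisor (deoptimizing if the
// divisor is zero) as dividend - (dividend / |divisor|) * |divisor|, with the
// quotient in edx produced by TruncatingDiv's multiply-by-constant sequence
// instead of an idiv.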
void LCodeGen::DoModByConstI(LModByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(ToRegister(instr->result()).is(eax));

  if (divisor == 0) {
    DeoptimizeIf(no_condition, instr, "division by zero");
    return;
  }

  __ TruncatingDiv(dividend, Abs(divisor));
  __ imul(edx, edx, Abs(divisor));
  __ mov(eax, dividend);
  __ sub(eax, edx);

  // Check for negative zero.
  HMod* hmod = instr->hydrogen();
  if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label remainder_not_zero;
    __ j(not_zero, &remainder_not_zero, Label::kNear);
    __ cmp(dividend, Immediate(0));
    DeoptimizeIf(less, instr, "minus zero");
    __ bind(&remainder_not_zero);
  }
}


void LCodeGen::DoModI(LModI* instr) {
  HMod* hmod = instr->hydrogen();

  Register left_reg = ToRegister(instr->left());
  DCHECK(left_reg.is(eax));
  Register right_reg = ToRegister(instr->right());
  DCHECK(!right_reg.is(eax));
  DCHECK(!right_reg.is(edx));
  Register result_reg = ToRegister(instr->result());
  DCHECK(result_reg.is(edx));

  Label done;
  // Check for x % 0, idiv would signal a divide error. We have to
  // deopt in this case because we can't return a NaN.
  if (hmod->CheckFlag(HValue::kCanBeDivByZero)) {
    __ test(right_reg, Operand(right_reg));
    DeoptimizeIf(zero, instr, "division by zero");
  }

  // Check for kMinInt % -1, idiv would signal a divide error. We
  // have to deopt if we care about -0, because we can't return that.
  if (hmod->CheckFlag(HValue::kCanOverflow)) {
    Label no_overflow_possible;
    __ cmp(left_reg, kMinInt);
    __ j(not_equal, &no_overflow_possible, Label::kNear);
    __ cmp(right_reg, -1);
    if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(equal, instr, "minus zero");
    } else {
      __ j(not_equal, &no_overflow_possible, Label::kNear);
      __ Move(result_reg, Immediate(0));
      __ jmp(&done, Label::kNear);
    }
    __ bind(&no_overflow_possible);
  }

  // Sign extend dividend in eax into edx:eax.
  __ cdq();

  // If we care about -0, test if the dividend is <0 and the result is 0.
  if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label positive_left;
    __ test(left_reg, Operand(left_reg));
    __ j(not_sign, &positive_left, Label::kNear);
    __ idiv(right_reg);
    __ test(result_reg, Operand(result_reg));
    DeoptimizeIf(zero, instr, "minus zero");
    __ jmp(&done, Label::kNear);
    __ bind(&positive_left);
  }
  __ idiv(right_reg);
  __ bind(&done);
}

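// Divides by a constant +/- 2^k using shifts. For negative dividends the sign
// bit is smeared and shifted into a |divisor| - 1 bias that is added before
// the arithmetic shift, so the result rounds toward zero rather than toward
// negative infinity; a final negation handles negative divisors.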
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001217void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
1218 Register dividend = ToRegister(instr->dividend());
1219 int32_t divisor = instr->divisor();
1220 Register result = ToRegister(instr->result());
1221 DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor)));
1222 DCHECK(!result.is(dividend));
1223
1224 // Check for (0 / -x) that will produce negative zero.
1225 HDiv* hdiv = instr->hydrogen();
1226 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1227 __ test(dividend, dividend);
1228 DeoptimizeIf(zero, instr, "minus zero");
1229 }
1230 // Check for (kMinInt / -1).
1231 if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) {
1232 __ cmp(dividend, kMinInt);
1233 DeoptimizeIf(zero, instr, "overflow");
1234 }
1235 // Deoptimize if remainder will not be 0.
1236 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1237 divisor != 1 && divisor != -1) {
1238 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1239 __ test(dividend, Immediate(mask));
1240 DeoptimizeIf(not_zero, instr, "lost precision");
1241 }
1242 __ Move(result, dividend);
1243 int32_t shift = WhichPowerOf2Abs(divisor);
1244 if (shift > 0) {
1245 // The arithmetic shift is always OK, the 'if' is an optimization only.
1246 if (shift > 1) __ sar(result, 31);
1247 __ shr(result, 32 - shift);
1248 __ add(result, dividend);
1249 __ sar(result, shift);
1250 }
1251 if (divisor < 0) __ neg(result);
1252}
1253
1254
1255void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
1256 Register dividend = ToRegister(instr->dividend());
1257 int32_t divisor = instr->divisor();
1258 DCHECK(ToRegister(instr->result()).is(edx));
1259
1260 if (divisor == 0) {
1261 DeoptimizeIf(no_condition, instr, "division by zero");
1262 return;
1263 }
1264
1265 // Check for (0 / -x) that will produce negative zero.
1266 HDiv* hdiv = instr->hydrogen();
1267 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1268 __ test(dividend, dividend);
1269 DeoptimizeIf(zero, instr, "minus zero");
1270 }
1271
1272 __ TruncatingDiv(dividend, Abs(divisor));
1273 if (divisor < 0) __ neg(edx);
1274
1275 if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1276 __ mov(eax, edx);
1277 __ imul(eax, eax, divisor);
1278 __ sub(eax, dividend);
1279 DeoptimizeIf(not_equal, instr, "lost precision");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001280 }
1281}
1282
1283
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001284// TODO(svenpanne) Refactor this to avoid code duplication with DoFlooringDivI.
Ben Murdochb0fe1622011-05-05 13:52:32 +01001285void LCodeGen::DoDivI(LDivI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001286 HBinaryOperation* hdiv = instr->hydrogen();
1287 Register dividend = ToRegister(instr->dividend());
1288 Register divisor = ToRegister(instr->divisor());
1289 Register remainder = ToRegister(instr->temp());
1290 DCHECK(dividend.is(eax));
1291 DCHECK(remainder.is(edx));
1292 DCHECK(ToRegister(instr->result()).is(eax));
1293 DCHECK(!divisor.is(eax));
1294 DCHECK(!divisor.is(edx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001295
1296 // Check for x / 0.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001297 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
1298 __ test(divisor, divisor);
1299 DeoptimizeIf(zero, instr, "division by zero");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001300 }
1301
1302 // Check for (0 / -x) that will produce negative zero.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001303 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
1304 Label dividend_not_zero;
1305 __ test(dividend, dividend);
1306 __ j(not_zero, &dividend_not_zero, Label::kNear);
1307 __ test(divisor, divisor);
1308 DeoptimizeIf(sign, instr, "minus zero");
1309 __ bind(&dividend_not_zero);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001310 }
1311
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001312 // Check for (kMinInt / -1).
1313 if (hdiv->CheckFlag(HValue::kCanOverflow)) {
1314 Label dividend_not_min_int;
1315 __ cmp(dividend, kMinInt);
1316 __ j(not_zero, &dividend_not_min_int, Label::kNear);
1317 __ cmp(divisor, -1);
1318 DeoptimizeIf(zero, instr, "overflow");
1319 __ bind(&dividend_not_min_int);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001320 }
1321
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001322 // Sign extend to edx (= remainder).
Ben Murdochb0fe1622011-05-05 13:52:32 +01001323 __ cdq();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001324 __ idiv(divisor);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001325
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001326 if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
1327 // Deoptimize if remainder is not 0.
1328 __ test(remainder, remainder);
1329 DeoptimizeIf(not_zero, instr, "lost precision");
1330 }
1331}
1332
1333
1334void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
1335 Register dividend = ToRegister(instr->dividend());
1336 int32_t divisor = instr->divisor();
1337 DCHECK(dividend.is(ToRegister(instr->result())));
1338
1339 // If the divisor is positive, things are easy: There can be no deopts and we
1340 // can simply do an arithmetic right shift.
1341 if (divisor == 1) return;
1342 int32_t shift = WhichPowerOf2Abs(divisor);
1343 if (divisor > 1) {
1344 __ sar(dividend, shift);
1345 return;
1346 }
1347
1348 // If the divisor is negative, we have to negate and handle edge cases.
1349 __ neg(dividend);
1350 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1351 DeoptimizeIf(zero, instr, "minus zero");
1352 }
1353
1354 // Dividing by -1 is basically negation, unless we overflow.
1355 if (divisor == -1) {
1356 if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
1357 DeoptimizeIf(overflow, instr, "overflow");
1358 }
1359 return;
1360 }
1361
1362 // If the negation could not overflow, simply shifting is OK.
1363 if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
1364 __ sar(dividend, shift);
1365 return;
1366 }
1367
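  // Negating kMinInt overflows and leaves the register unchanged, so on
  // overflow the quotient kMinInt / divisor is loaded as a compile-time
  // constant; otherwise the negated dividend can simply be shifted.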
1368 Label not_kmin_int, done;
1369 __ j(no_overflow, &not_kmin_int, Label::kNear);
1370 __ mov(dividend, Immediate(kMinInt / divisor));
1371 __ jmp(&done, Label::kNear);
1372 __ bind(&not_kmin_int);
1373 __ sar(dividend, shift);
1374 __ bind(&done);
1375}
1376
1377
1378void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
1379 Register dividend = ToRegister(instr->dividend());
1380 int32_t divisor = instr->divisor();
1381 DCHECK(ToRegister(instr->result()).is(edx));
1382
1383 if (divisor == 0) {
1384 DeoptimizeIf(no_condition, instr, "division by zero");
1385 return;
1386 }
1387
1388 // Check for (0 / -x) that will produce negative zero.
1389 HMathFloorOfDiv* hdiv = instr->hydrogen();
1390 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
1391 __ test(dividend, dividend);
1392 DeoptimizeIf(zero, instr, "minus zero");
1393 }
1394
1395 // Easy case: We need no dynamic check for the dividend and the flooring
1396 // division is the same as the truncating division.
1397 if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) ||
1398 (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) {
1399 __ TruncatingDiv(dividend, Abs(divisor));
1400 if (divisor < 0) __ neg(edx);
1401 return;
1402 }
1403
1404 // In the general case we may need to adjust before and after the truncating
1405 // division to get a flooring division.
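  // When dividend and divisor have opposite signs and the division is not
  // exact, floor(a / b) == trunc(a / b) - 1. Biasing the dividend by one
  // toward zero first makes the truncating quotient off by exactly one in
  // that branch, so the final dec yields the floored result,
  // e.g. -7 / 2: trunc((-7 + 1) / 2) - 1 == -3 - 1 == -4.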
1406 Register temp = ToRegister(instr->temp3());
1407 DCHECK(!temp.is(dividend) && !temp.is(eax) && !temp.is(edx));
1408 Label needs_adjustment, done;
1409 __ cmp(dividend, Immediate(0));
1410 __ j(divisor > 0 ? less : greater, &needs_adjustment, Label::kNear);
1411 __ TruncatingDiv(dividend, Abs(divisor));
1412 if (divisor < 0) __ neg(edx);
1413 __ jmp(&done, Label::kNear);
1414 __ bind(&needs_adjustment);
1415 __ lea(temp, Operand(dividend, divisor > 0 ? 1 : -1));
1416 __ TruncatingDiv(temp, Abs(divisor));
1417 if (divisor < 0) __ neg(edx);
1418 __ dec(edx);
1419 __ bind(&done);
1420}
1421
1422
1423// TODO(svenpanne) Refactor this to avoid code duplication with DoDivI.
1424void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) {
1425 HBinaryOperation* hdiv = instr->hydrogen();
1426 Register dividend = ToRegister(instr->dividend());
1427 Register divisor = ToRegister(instr->divisor());
1428 Register remainder = ToRegister(instr->temp());
1429 Register result = ToRegister(instr->result());
1430 DCHECK(dividend.is(eax));
1431 DCHECK(remainder.is(edx));
1432 DCHECK(result.is(eax));
1433 DCHECK(!divisor.is(eax));
1434 DCHECK(!divisor.is(edx));
1435
1436 // Check for x / 0.
1437 if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
1438 __ test(divisor, divisor);
1439 DeoptimizeIf(zero, instr, "division by zero");
1440 }
1441
1442 // Check for (0 / -x) that will produce negative zero.
1443 if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
1444 Label dividend_not_zero;
1445 __ test(dividend, dividend);
1446 __ j(not_zero, &dividend_not_zero, Label::kNear);
1447 __ test(divisor, divisor);
1448 DeoptimizeIf(sign, instr, "minus zero");
1449 __ bind(&dividend_not_zero);
1450 }
1451
1452 // Check for (kMinInt / -1).
1453 if (hdiv->CheckFlag(HValue::kCanOverflow)) {
1454 Label dividend_not_min_int;
1455 __ cmp(dividend, kMinInt);
1456 __ j(not_zero, &dividend_not_min_int, Label::kNear);
1457 __ cmp(divisor, -1);
1458 DeoptimizeIf(zero, instr, "overflow");
1459 __ bind(&dividend_not_min_int);
1460 }
1461
1462 // Sign extend to edx (= remainder).
1463 __ cdq();
1464 __ idiv(divisor);
1465
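  // Adjust the truncated quotient to a floored one: if the remainder is
  // non-zero and its sign differs from the divisor's, subtract 1. The xor
  // puts that sign difference into the sign bit, sar 31 turns it into 0 or
  // -1, and the add applies the correction.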
1466 Label done;
1467 __ test(remainder, remainder);
1468 __ j(zero, &done, Label::kNear);
1469 __ xor_(remainder, divisor);
1470 __ sar(remainder, 31);
1471 __ add(result, remainder);
1472 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001473}
1474
1475
1476void LCodeGen::DoMulI(LMulI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001477 Register left = ToRegister(instr->left());
1478 LOperand* right = instr->right();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001479
1480 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001481 __ mov(ToRegister(instr->temp()), left);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001482 }
1483
1484 if (right->IsConstantOperand()) {
Steve Block44f0eee2011-05-26 01:26:41 +01001485 // Try strength reductions on the multiplication.
1486 // All replacement instructions are at most as long as the imul
1487 // and have better latency.
1488 int constant = ToInteger32(LConstantOperand::cast(right));
1489 if (constant == -1) {
1490 __ neg(left);
1491 } else if (constant == 0) {
1492 __ xor_(left, Operand(left));
1493 } else if (constant == 2) {
1494 __ add(left, Operand(left));
1495 } else if (!instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1496 // If we know that the multiplication can't overflow, it's safe to
1497 // use instructions that don't set the overflow flag for the
1498 // multiplication.
1499 switch (constant) {
1500 case 1:
1501 // Do nothing.
1502 break;
1503 case 3:
1504 __ lea(left, Operand(left, left, times_2, 0));
1505 break;
1506 case 4:
1507 __ shl(left, 2);
1508 break;
1509 case 5:
1510 __ lea(left, Operand(left, left, times_4, 0));
1511 break;
1512 case 8:
1513 __ shl(left, 3);
1514 break;
1515 case 9:
1516 __ lea(left, Operand(left, left, times_8, 0));
1517 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001518 case 16:
1519 __ shl(left, 4);
1520 break;
Steve Block44f0eee2011-05-26 01:26:41 +01001521 default:
1522 __ imul(left, left, constant);
1523 break;
1524 }
1525 } else {
1526 __ imul(left, left, constant);
1527 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001528 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001529 if (instr->hydrogen()->representation().IsSmi()) {
1530 __ SmiUntag(left);
1531 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001532 __ imul(left, ToOperand(right));
1533 }
1534
1535 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001536 DeoptimizeIf(overflow, instr, "overflow");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001537 }
1538
1539 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1540 // Bail out if the result is supposed to be negative zero.
Ben Murdoch257744e2011-11-30 15:57:28 +00001541 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001542 __ test(left, Operand(left));
Ben Murdoch257744e2011-11-30 15:57:28 +00001543 __ j(not_zero, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001544 if (right->IsConstantOperand()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001545 if (ToInteger32(LConstantOperand::cast(right)) < 0) {
1546 DeoptimizeIf(no_condition, instr, "minus zero");
1547 } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
1548 __ cmp(ToRegister(instr->temp()), Immediate(0));
1549 DeoptimizeIf(less, instr, "minus zero");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001550 }
1551 } else {
1552 // Test the non-zero operand for negative sign.
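      // The product is zero, so at least one operand is zero. Or-ing the
      // saved left value with right makes the sign flag reflect whether the
      // other operand was negative, in which case the correct JS result
      // would be -0.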
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001553 __ or_(ToRegister(instr->temp()), ToOperand(right));
1554 DeoptimizeIf(sign, instr, "minus zero");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001555 }
1556 __ bind(&done);
1557 }
1558}
1559
1560
1561void LCodeGen::DoBitI(LBitI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001562 LOperand* left = instr->left();
1563 LOperand* right = instr->right();
1564 DCHECK(left->Equals(instr->result()));
1565 DCHECK(left->IsRegister());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001566
1567 if (right->IsConstantOperand()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001568 int32_t right_operand =
1569 ToRepresentation(LConstantOperand::cast(right),
1570 instr->hydrogen()->representation());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001571 switch (instr->op()) {
1572 case Token::BIT_AND:
1573 __ and_(ToRegister(left), right_operand);
1574 break;
1575 case Token::BIT_OR:
1576 __ or_(ToRegister(left), right_operand);
1577 break;
1578 case Token::BIT_XOR:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001579 if (right_operand == int32_t(~0)) {
1580 __ not_(ToRegister(left));
1581 } else {
1582 __ xor_(ToRegister(left), right_operand);
1583 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001584 break;
1585 default:
1586 UNREACHABLE();
1587 break;
1588 }
1589 } else {
1590 switch (instr->op()) {
1591 case Token::BIT_AND:
1592 __ and_(ToRegister(left), ToOperand(right));
1593 break;
1594 case Token::BIT_OR:
1595 __ or_(ToRegister(left), ToOperand(right));
1596 break;
1597 case Token::BIT_XOR:
1598 __ xor_(ToRegister(left), ToOperand(right));
1599 break;
1600 default:
1601 UNREACHABLE();
1602 break;
1603 }
1604 }
1605}
1606
1607
1608void LCodeGen::DoShiftI(LShiftI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001609 LOperand* left = instr->left();
1610 LOperand* right = instr->right();
1611 DCHECK(left->Equals(instr->result()));
1612 DCHECK(left->IsRegister());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001613 if (right->IsRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001614 DCHECK(ToRegister(right).is(ecx));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001615
1616 switch (instr->op()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001617 case Token::ROR:
1618 __ ror_cl(ToRegister(left));
1619 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001620 case Token::SAR:
1621 __ sar_cl(ToRegister(left));
1622 break;
1623 case Token::SHR:
1624 __ shr_cl(ToRegister(left));
1625 if (instr->can_deopt()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001626 __ test(ToRegister(left), ToRegister(left));
1627 DeoptimizeIf(sign, instr, "negative value");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001628 }
1629 break;
1630 case Token::SHL:
1631 __ shl_cl(ToRegister(left));
1632 break;
1633 default:
1634 UNREACHABLE();
1635 break;
1636 }
1637 } else {
1638 int value = ToInteger32(LConstantOperand::cast(right));
1639 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1640 switch (instr->op()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001641 case Token::ROR:
1642 if (shift_count == 0 && instr->can_deopt()) {
1643 __ test(ToRegister(left), ToRegister(left));
1644 DeoptimizeIf(sign, instr, "negative value");
1645 } else {
1646 __ ror(ToRegister(left), shift_count);
1647 }
1648 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001649 case Token::SAR:
1650 if (shift_count != 0) {
1651 __ sar(ToRegister(left), shift_count);
1652 }
1653 break;
1654 case Token::SHR:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001655 if (shift_count != 0) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001656 __ shr(ToRegister(left), shift_count);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001657 } else if (instr->can_deopt()) {
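          // A zero shift leaves the value unchanged; a set sign bit would
          // make the unsigned result unrepresentable as an int32, so the
          // only safe option is to deoptimize.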
1658 __ test(ToRegister(left), ToRegister(left));
1659 DeoptimizeIf(sign, instr, "negative value");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001660 }
1661 break;
1662 case Token::SHL:
1663 if (shift_count != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001664 if (instr->hydrogen_value()->representation().IsSmi() &&
1665 instr->can_deopt()) {
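            // Smi result with a possible overflow: shift by one less and let
            // SmiTag (a self-add on ia32) perform the final doubling, so the
            // overflow flag reflects whether the value still fits in a smi.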
1666 if (shift_count != 1) {
1667 __ shl(ToRegister(left), shift_count - 1);
1668 }
1669 __ SmiTag(ToRegister(left));
1670 DeoptimizeIf(overflow, instr, "overflow");
1671 } else {
1672 __ shl(ToRegister(left), shift_count);
1673 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001674 }
1675 break;
1676 default:
1677 UNREACHABLE();
1678 break;
1679 }
1680 }
1681}
1682
1683
1684void LCodeGen::DoSubI(LSubI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001685 LOperand* left = instr->left();
1686 LOperand* right = instr->right();
1687 DCHECK(left->Equals(instr->result()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001688
1689 if (right->IsConstantOperand()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001690 __ sub(ToOperand(left),
1691 ToImmediate(right, instr->hydrogen()->representation()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001692 } else {
1693 __ sub(ToRegister(left), ToOperand(right));
1694 }
1695 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001696 DeoptimizeIf(overflow, instr, "overflow");
Ben Murdochb0fe1622011-05-05 13:52:32 +01001697 }
1698}
1699
1700
1701void LCodeGen::DoConstantI(LConstantI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001702 __ Move(ToRegister(instr->result()), Immediate(instr->value()));
1703}
1704
1705
1706void LCodeGen::DoConstantS(LConstantS* instr) {
1707 __ Move(ToRegister(instr->result()), Immediate(instr->value()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01001708}
1709
1710
1711void LCodeGen::DoConstantD(LConstantD* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001712 double v = instr->value();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001713 uint64_t int_val = bit_cast<uint64_t, double>(v);
1714 int32_t lower = static_cast<int32_t>(int_val);
1715 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
1716 DCHECK(instr->result()->IsDoubleRegister());
1717
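  // The 64-bit pattern is assembled from its two 32-bit halves: with SSE4.1,
  // movd loads the low word and pinsrd inserts the high word directly;
  // otherwise the high word is shifted into place with psllq and combined
  // with the low word via orps.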
1718 XMMRegister res = ToDoubleRegister(instr->result());
1719 if (int_val == 0) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001720 __ xorps(res, res);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001721 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001722 Register temp = ToRegister(instr->temp());
Ben Murdoch8b112d22011-06-08 16:22:53 +01001723 if (CpuFeatures::IsSupported(SSE4_1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001724 CpuFeatureScope scope2(masm(), SSE4_1);
Steve Block1e0659c2011-05-24 12:43:12 +01001725 if (lower != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001726 __ Move(temp, Immediate(lower));
Steve Block1e0659c2011-05-24 12:43:12 +01001727 __ movd(res, Operand(temp));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001728 __ Move(temp, Immediate(upper));
Steve Block1e0659c2011-05-24 12:43:12 +01001729 __ pinsrd(res, Operand(temp), 1);
1730 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00001731 __ xorps(res, res);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001732 __ Move(temp, Immediate(upper));
Steve Block1e0659c2011-05-24 12:43:12 +01001733 __ pinsrd(res, Operand(temp), 1);
1734 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001735 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001736 __ Move(temp, Immediate(upper));
Steve Block1e0659c2011-05-24 12:43:12 +01001737 __ movd(res, Operand(temp));
1738 __ psllq(res, 32);
1739 if (lower != 0) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001740 XMMRegister xmm_scratch = double_scratch0();
1741 __ Move(temp, Immediate(lower));
1742 __ movd(xmm_scratch, Operand(temp));
1743 __ orps(res, xmm_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01001744 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001745 }
1746 }
1747}
1748
1749
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001750void LCodeGen::DoConstantE(LConstantE* instr) {
1751 __ lea(ToRegister(instr->result()), Operand::StaticVariable(instr->value()));
1752}
1753
1754
Ben Murdochb0fe1622011-05-05 13:52:32 +01001755void LCodeGen::DoConstantT(LConstantT* instr) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001756 Register reg = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001757 Handle<Object> object = instr->value(isolate());
1758 AllowDeferredHandleDereference smi_check;
1759 __ LoadObject(reg, object);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001760}
1761
1762
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001763void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01001764 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001765 Register map = ToRegister(instr->value());
1766 __ EnumLength(result, map);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001767}
1768
1769
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001770void LCodeGen::DoDateField(LDateField* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001771 Register object = ToRegister(instr->date());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001772 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001773 Register scratch = ToRegister(instr->temp());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001774 Smi* index = instr->index();
1775 Label runtime, done;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001776 DCHECK(object.is(result));
1777 DCHECK(object.is(eax));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001778
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001779 __ test(object, Immediate(kSmiTagMask));
1780 DeoptimizeIf(zero, instr, "Smi");
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001781 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001782 DeoptimizeIf(not_equal, instr, "not a date object");
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001783
1784 if (index->value() == 0) {
1785 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
1786 } else {
1787 if (index->value() < JSDate::kFirstUncachedField) {
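      // Fields below kFirstUncachedField are cached on the JSDate object.
      // The cache is only valid while the object's stamp matches the
      // isolate's current date cache stamp; otherwise fall through to the
      // runtime call below.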
1788 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1789 __ mov(scratch, Operand::StaticVariable(stamp));
1790 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
1791 __ j(not_equal, &runtime, Label::kNear);
1792 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
1793 kPointerSize * index->value()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001794 __ jmp(&done, Label::kNear);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001795 }
1796 __ bind(&runtime);
1797 __ PrepareCallCFunction(2, scratch);
1798 __ mov(Operand(esp, 0), object);
1799 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
1800 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1801 __ bind(&done);
1802 }
1803}
1804
1805
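// Builds the memory operand for the character at |index| in a sequential
// string: SeqString::kHeaderSize plus the index scaled by the character size
// (1 byte for one-byte strings, 2 bytes for two-byte strings). A constant
// index is folded directly into the displacement.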
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001806Operand LCodeGen::BuildSeqStringOperand(Register string,
1807 LOperand* index,
1808 String::Encoding encoding) {
1809 if (index->IsConstantOperand()) {
1810 int offset = ToRepresentation(LConstantOperand::cast(index),
1811 Representation::Integer32());
1812 if (encoding == String::TWO_BYTE_ENCODING) {
1813 offset *= kUC16Size;
1814 }
1815 STATIC_ASSERT(kCharSize == 1);
1816 return FieldOperand(string, SeqString::kHeaderSize + offset);
1817 }
1818 return FieldOperand(
1819 string, ToRegister(index),
1820 encoding == String::ONE_BYTE_ENCODING ? times_1 : times_2,
1821 SeqString::kHeaderSize);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001822}
1823
1824
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001825void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
1826 String::Encoding encoding = instr->hydrogen()->encoding();
1827 Register result = ToRegister(instr->result());
1828 Register string = ToRegister(instr->string());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001829
1830 if (FLAG_debug_code) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001831 __ push(string);
1832 __ mov(string, FieldOperand(string, HeapObject::kMapOffset));
1833 __ movzx_b(string, FieldOperand(string, Map::kInstanceTypeOffset));
1834
1835 __ and_(string, Immediate(kStringRepresentationMask | kStringEncodingMask));
1836 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
1837 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
1838 __ cmp(string, Immediate(encoding == String::ONE_BYTE_ENCODING
1839 ? one_byte_seq_type : two_byte_seq_type));
1840 __ Check(equal, kUnexpectedStringType);
1841 __ pop(string);
1842 }
1843
1844 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding);
1845 if (encoding == String::ONE_BYTE_ENCODING) {
1846 __ movzx_b(result, operand);
1847 } else {
1848 __ movzx_w(result, operand);
1849 }
1850}
1851
1852
1853void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
1854 String::Encoding encoding = instr->hydrogen()->encoding();
1855 Register string = ToRegister(instr->string());
1856
1857 if (FLAG_debug_code) {
1858 Register value = ToRegister(instr->value());
1859 Register index = ToRegister(instr->index());
1860 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
1861 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
1862 int encoding_mask =
1863 instr->hydrogen()->encoding() == String::ONE_BYTE_ENCODING
1864 ? one_byte_seq_type : two_byte_seq_type;
1865 __ EmitSeqStringSetCharCheck(string, index, value, encoding_mask);
1866 }
1867
1868 Operand operand = BuildSeqStringOperand(string, instr->index(), encoding);
1869 if (instr->value()->IsConstantOperand()) {
1870 int value = ToRepresentation(LConstantOperand::cast(instr->value()),
1871 Representation::Integer32());
1872 DCHECK_LE(0, value);
1873 if (encoding == String::ONE_BYTE_ENCODING) {
1874 DCHECK_LE(value, String::kMaxOneByteCharCode);
1875 __ mov_b(operand, static_cast<int8_t>(value));
1876 } else {
1877 DCHECK_LE(value, String::kMaxUtf16CodeUnit);
1878 __ mov_w(operand, static_cast<int16_t>(value));
1879 }
1880 } else {
1881 Register value = ToRegister(instr->value());
1882 if (encoding == String::ONE_BYTE_ENCODING) {
1883 __ mov_b(operand, value);
1884 } else {
1885 __ mov_w(operand, value);
1886 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001887 }
1888}
1889
1890
1891void LCodeGen::DoAddI(LAddI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001892 LOperand* left = instr->left();
1893 LOperand* right = instr->right();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001894
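  // When the result register differs from the left operand, lea computes the
  // sum without clobbering left and without touching the flags; UseLea() is
  // presumably only true when no overflow check is required.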
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001895 if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
1896 if (right->IsConstantOperand()) {
1897 int32_t offset = ToRepresentation(LConstantOperand::cast(right),
1898 instr->hydrogen()->representation());
1899 __ lea(ToRegister(instr->result()), MemOperand(ToRegister(left), offset));
1900 } else {
1901 Operand address(ToRegister(left), ToRegister(right), times_1, 0);
1902 __ lea(ToRegister(instr->result()), address);
1903 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001904 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001905 if (right->IsConstantOperand()) {
1906 __ add(ToOperand(left),
1907 ToImmediate(right, instr->hydrogen()->representation()));
1908 } else {
1909 __ add(ToRegister(left), ToOperand(right));
1910 }
1911 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1912 DeoptimizeIf(overflow, instr, "overflow");
1913 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01001914 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001915}
Ben Murdochb0fe1622011-05-05 13:52:32 +01001916
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001917
1918void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
1919 LOperand* left = instr->left();
1920 LOperand* right = instr->right();
1921 DCHECK(left->Equals(instr->result()));
1922 HMathMinMax::Operation operation = instr->hydrogen()->operation();
1923 if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
1924 Label return_left;
1925 Condition condition = (operation == HMathMinMax::kMathMin)
1926 ? less_equal
1927 : greater_equal;
1928 if (right->IsConstantOperand()) {
1929 Operand left_op = ToOperand(left);
1930 Immediate immediate = ToImmediate(LConstantOperand::cast(instr->right()),
1931 instr->hydrogen()->representation());
1932 __ cmp(left_op, immediate);
1933 __ j(condition, &return_left, Label::kNear);
1934 __ mov(left_op, immediate);
1935 } else {
1936 Register left_reg = ToRegister(left);
1937 Operand right_op = ToOperand(right);
1938 __ cmp(left_reg, right_op);
1939 __ j(condition, &return_left, Label::kNear);
1940 __ mov(left_reg, right_op);
1941 }
1942 __ bind(&return_left);
1943 } else {
1944 DCHECK(instr->hydrogen()->representation().IsDouble());
1945 Label check_nan_left, check_zero, return_left, return_right;
1946 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
1947 XMMRegister left_reg = ToDoubleRegister(left);
1948 XMMRegister right_reg = ToDoubleRegister(right);
1949 __ ucomisd(left_reg, right_reg);
1950 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN.
1951 __ j(equal, &check_zero, Label::kNear); // left == right.
1952 __ j(condition, &return_left, Label::kNear);
1953 __ jmp(&return_right, Label::kNear);
1954
1955 __ bind(&check_zero);
1956 XMMRegister xmm_scratch = double_scratch0();
1957 __ xorps(xmm_scratch, xmm_scratch);
1958 __ ucomisd(left_reg, xmm_scratch);
1959 __ j(not_equal, &return_left, Label::kNear); // left == right != 0.
1960 // At this point, both left and right are either 0 or -0.
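    // For min, or-ing the values propagates the sign bit, yielding -0 if
    // either operand is -0; for max, the addition yields +0 unless both
    // operands are -0.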
1961 if (operation == HMathMinMax::kMathMin) {
1962 __ orpd(left_reg, right_reg);
1963 } else {
1964 // Since we operate on +0 and/or -0, addsd and andsd have the same effect.
1965 __ addsd(left_reg, right_reg);
1966 }
1967 __ jmp(&return_left, Label::kNear);
1968
1969 __ bind(&check_nan_left);
1970 __ ucomisd(left_reg, left_reg); // NaN check.
1971 __ j(parity_even, &return_left, Label::kNear); // left == NaN.
1972 __ bind(&return_right);
1973 __ movaps(left_reg, right_reg);
1974
1975 __ bind(&return_left);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001976 }
1977}
1978
1979
1980void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001981 XMMRegister left = ToDoubleRegister(instr->left());
1982 XMMRegister right = ToDoubleRegister(instr->right());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001983 XMMRegister result = ToDoubleRegister(instr->result());
Ben Murdochb0fe1622011-05-05 13:52:32 +01001984 switch (instr->op()) {
1985 case Token::ADD:
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001986 __ addsd(left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001987 break;
1988 case Token::SUB:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001989 __ subsd(left, right);
1990 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01001991 case Token::MUL:
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001992 __ mulsd(left, right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001993 break;
1994 case Token::DIV:
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001995 __ divsd(left, right);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001996      // Don't delete this mov. It may improve performance on some CPUs
1997      // when there is a mulsd depending on the result.
1998 __ movaps(left, left);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001999 break;
2000 case Token::MOD: {
2001 // Pass two doubles as arguments on the stack.
2002 __ PrepareCallCFunction(4, eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002003 __ movsd(Operand(esp, 0 * kDoubleSize), left);
2004 __ movsd(Operand(esp, 1 * kDoubleSize), right);
Steve Block44f0eee2011-05-26 01:26:41 +01002005 __ CallCFunction(
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002006 ExternalReference::mod_two_doubles_operation(isolate()),
Steve Block44f0eee2011-05-26 01:26:41 +01002007 4);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002008
2009 // Return value is in st(0) on ia32.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002010 // Store it into the result register.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002011 __ sub(Operand(esp), Immediate(kDoubleSize));
2012 __ fstp_d(Operand(esp, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002013 __ movsd(result, Operand(esp, 0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002014 __ add(Operand(esp), Immediate(kDoubleSize));
2015 break;
2016 }
2017 default:
2018 UNREACHABLE();
2019 break;
2020 }
2021}
2022
2023
2024void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002025 DCHECK(ToRegister(instr->context()).is(esi));
2026 DCHECK(ToRegister(instr->left()).is(edx));
2027 DCHECK(ToRegister(instr->right()).is(eax));
2028 DCHECK(ToRegister(instr->result()).is(eax));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002029
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002030 Handle<Code> code =
2031 CodeFactory::BinaryOpIC(isolate(), instr->op(), NO_OVERWRITE).code();
2032 CallCode(code, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002033}
2034
2035
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002036template<class InstrType>
2037void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
2038 int left_block = instr->TrueDestination(chunk_);
2039 int right_block = instr->FalseDestination(chunk_);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002040
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002041 int next_block = GetNextEmittedBlock();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002042
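  // Emit at most one jump: if either destination is the next block in
  // emission order, fall through to it; identical destinations (or
  // no_condition) degenerate to an unconditional goto.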
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002043 if (right_block == left_block || cc == no_condition) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002044 EmitGoto(left_block);
2045 } else if (left_block == next_block) {
2046 __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
2047 } else if (right_block == next_block) {
2048 __ j(cc, chunk_->GetAssemblyLabel(left_block));
2049 } else {
2050 __ j(cc, chunk_->GetAssemblyLabel(left_block));
2051 __ jmp(chunk_->GetAssemblyLabel(right_block));
2052 }
2053}
2054
2055
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002056template<class InstrType>
2057void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
2058 int false_block = instr->FalseDestination(chunk_);
2059 if (cc == no_condition) {
2060 __ jmp(chunk_->GetAssemblyLabel(false_block));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002061 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002062 __ j(cc, chunk_->GetAssemblyLabel(false_block));
2063 }
2064}
2065
2066
2067void LCodeGen::DoBranch(LBranch* instr) {
2068 Representation r = instr->hydrogen()->value()->representation();
2069 if (r.IsSmiOrInteger32()) {
2070 Register reg = ToRegister(instr->value());
2071 __ test(reg, Operand(reg));
2072 EmitBranch(instr, not_zero);
2073 } else if (r.IsDouble()) {
2074 DCHECK(!info()->IsStub());
2075 XMMRegister reg = ToDoubleRegister(instr->value());
2076 XMMRegister xmm_scratch = double_scratch0();
2077 __ xorps(xmm_scratch, xmm_scratch);
2078 __ ucomisd(reg, xmm_scratch);
2079 EmitBranch(instr, not_equal);
2080 } else {
2081 DCHECK(r.IsTagged());
2082 Register reg = ToRegister(instr->value());
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002083 HType type = instr->hydrogen()->value()->type();
2084 if (type.IsBoolean()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002085 DCHECK(!info()->IsStub());
Steve Block44f0eee2011-05-26 01:26:41 +01002086 __ cmp(reg, factory()->true_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002087 EmitBranch(instr, equal);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002088 } else if (type.IsSmi()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002089 DCHECK(!info()->IsStub());
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002090 __ test(reg, Operand(reg));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002091 EmitBranch(instr, not_equal);
2092 } else if (type.IsJSArray()) {
2093 DCHECK(!info()->IsStub());
2094 EmitBranch(instr, no_condition);
2095 } else if (type.IsHeapNumber()) {
2096 DCHECK(!info()->IsStub());
2097 XMMRegister xmm_scratch = double_scratch0();
2098 __ xorps(xmm_scratch, xmm_scratch);
2099 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
2100 EmitBranch(instr, not_equal);
2101 } else if (type.IsString()) {
2102 DCHECK(!info()->IsStub());
2103 __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
2104 EmitBranch(instr, not_equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002105 } else {
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002106 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002107 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002108
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002109 if (expected.Contains(ToBooleanStub::UNDEFINED)) {
2110 // undefined -> false.
2111 __ cmp(reg, factory()->undefined_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002112 __ j(equal, instr->FalseLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002113 }
2114 if (expected.Contains(ToBooleanStub::BOOLEAN)) {
2115 // true -> true.
2116 __ cmp(reg, factory()->true_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002117 __ j(equal, instr->TrueLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002118 // false -> false.
2119 __ cmp(reg, factory()->false_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002120 __ j(equal, instr->FalseLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002121 }
2122 if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
2123 // 'null' -> false.
2124 __ cmp(reg, factory()->null_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002125 __ j(equal, instr->FalseLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002126 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002127
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002128 if (expected.Contains(ToBooleanStub::SMI)) {
2129 // Smis: 0 -> false, all other -> true.
2130 __ test(reg, Operand(reg));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002131 __ j(equal, instr->FalseLabel(chunk_));
2132 __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002133 } else if (expected.NeedsMap()) {
2134 // If we need a map later and have a Smi -> deopt.
2135 __ test(reg, Immediate(kSmiTagMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002136 DeoptimizeIf(zero, instr, "Smi");
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002137 }
2138
2139 Register map = no_reg; // Keep the compiler happy.
2140 if (expected.NeedsMap()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002141 map = ToRegister(instr->temp());
2142 DCHECK(!map.is(reg));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002143 __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
2144
2145 if (expected.CanBeUndetectable()) {
2146 // Undetectable -> false.
2147 __ test_b(FieldOperand(map, Map::kBitFieldOffset),
2148 1 << Map::kIsUndetectable);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002149 __ j(not_zero, instr->FalseLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002150 }
2151 }
2152
2153 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
2154 // spec object -> true.
2155 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002156 __ j(above_equal, instr->TrueLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002157 }
2158
2159 if (expected.Contains(ToBooleanStub::STRING)) {
2160 // String value -> false iff empty.
2161 Label not_string;
2162 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
2163 __ j(above_equal, &not_string, Label::kNear);
2164 __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002165 __ j(not_zero, instr->TrueLabel(chunk_));
2166 __ jmp(instr->FalseLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002167 __ bind(&not_string);
2168 }
2169
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002170 if (expected.Contains(ToBooleanStub::SYMBOL)) {
2171 // Symbol value -> true.
2172 __ CmpInstanceType(map, SYMBOL_TYPE);
2173 __ j(equal, instr->TrueLabel(chunk_));
2174 }
2175
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002176 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
2177 // heap number -> false iff +0, -0, or NaN.
2178 Label not_heap_number;
2179 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
2180 factory()->heap_number_map());
2181 __ j(not_equal, &not_heap_number, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002182 XMMRegister xmm_scratch = double_scratch0();
2183 __ xorps(xmm_scratch, xmm_scratch);
2184 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
2185 __ j(zero, instr->FalseLabel(chunk_));
2186 __ jmp(instr->TrueLabel(chunk_));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00002187 __ bind(&not_heap_number);
2188 }
2189
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002190 if (!expected.IsGeneric()) {
2191 // We've seen something for the first time -> deopt.
2192 // This can only happen if we are not generic already.
2193 DeoptimizeIf(no_condition, instr, "unexpected object");
2194 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002195 }
2196 }
2197}
2198
2199
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002200void LCodeGen::EmitGoto(int block) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002201 if (!IsNextEmittedBlock(block)) {
2202 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block)));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002203 }
2204}
2205
2206
Ben Murdochb0fe1622011-05-05 13:52:32 +01002207void LCodeGen::DoGoto(LGoto* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002208 EmitGoto(instr->block_id());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002209}
2210
2211
2212Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
2213 Condition cond = no_condition;
2214 switch (op) {
2215 case Token::EQ:
2216 case Token::EQ_STRICT:
2217 cond = equal;
2218 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002219 case Token::NE:
2220 case Token::NE_STRICT:
2221 cond = not_equal;
2222 break;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002223 case Token::LT:
2224 cond = is_unsigned ? below : less;
2225 break;
2226 case Token::GT:
2227 cond = is_unsigned ? above : greater;
2228 break;
2229 case Token::LTE:
2230 cond = is_unsigned ? below_equal : less_equal;
2231 break;
2232 case Token::GTE:
2233 cond = is_unsigned ? above_equal : greater_equal;
2234 break;
2235 case Token::IN:
2236 case Token::INSTANCEOF:
2237 default:
2238 UNREACHABLE();
2239 }
2240 return cond;
2241}
2242
2243
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002244void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
2245 LOperand* left = instr->left();
2246 LOperand* right = instr->right();
2247 bool is_unsigned =
2248 instr->is_double() ||
2249 instr->hydrogen()->left()->CheckFlag(HInstruction::kUint32) ||
2250 instr->hydrogen()->right()->CheckFlag(HInstruction::kUint32);
2251 Condition cc = TokenToCondition(instr->op(), is_unsigned);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002252
2253 if (left->IsConstantOperand() && right->IsConstantOperand()) {
2254 // We can statically evaluate the comparison.
2255 double left_val = ToDouble(LConstantOperand::cast(left));
2256 double right_val = ToDouble(LConstantOperand::cast(right));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002257 int next_block = EvalComparison(instr->op(), left_val, right_val) ?
2258 instr->TrueDestination(chunk_) : instr->FalseDestination(chunk_);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002259 EmitGoto(next_block);
2260 } else {
2261 if (instr->is_double()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002262 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002263 // Don't base result on EFLAGS when a NaN is involved. Instead
2264 // jump to the false block.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002265 __ j(parity_even, instr->FalseLabel(chunk_));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002266 } else {
2267 if (right->IsConstantOperand()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002268 __ cmp(ToOperand(left),
2269 ToImmediate(right, instr->hydrogen()->representation()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002270 } else if (left->IsConstantOperand()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002271 __ cmp(ToOperand(right),
2272 ToImmediate(left, instr->hydrogen()->representation()));
2273 // We commuted the operands, so commute the condition.
2274 cc = CommuteCondition(cc);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002275 } else {
2276 __ cmp(ToRegister(left), ToOperand(right));
2277 }
2278 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002279 EmitBranch(instr, cc);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002280 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002281}
2282
2283
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002284void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002285 Register left = ToRegister(instr->left());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002286
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002287 if (instr->right()->IsConstantOperand()) {
2288 Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right()));
2289 __ CmpObject(left, right);
2290 } else {
2291 Operand right = ToOperand(instr->right());
2292 __ cmp(left, right);
2293 }
2294 EmitBranch(instr, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002295}
2296
2297
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002298void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
2299 if (instr->hydrogen()->representation().IsTagged()) {
2300 Register input_reg = ToRegister(instr->object());
2301 __ cmp(input_reg, factory()->the_hole_value());
2302 EmitBranch(instr, equal);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002303 return;
2304 }
2305
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002306 XMMRegister input_reg = ToDoubleRegister(instr->object());
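  // The hole is encoded as a NaN with a distinctive upper word. A
  // self-compare raises the parity flag only for NaN, so any non-NaN value
  // cannot be the hole; otherwise the value is spilled to the stack and its
  // upper 32 bits are compared against kHoleNanUpper32.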
2307 __ ucomisd(input_reg, input_reg);
2308 EmitFalseBranch(instr, parity_odd);
2309
2310 __ sub(esp, Immediate(kDoubleSize));
2311 __ movsd(MemOperand(esp, 0), input_reg);
2312
2313 __ add(esp, Immediate(kDoubleSize));
2314 int offset = sizeof(kHoleNanUpper32);
2315 __ cmp(MemOperand(esp, -offset), Immediate(kHoleNanUpper32));
2316 EmitBranch(instr, equal);
2317}
2318
2319
2320void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
2321 Representation rep = instr->hydrogen()->value()->representation();
2322 DCHECK(!rep.IsInteger32());
2323 Register scratch = ToRegister(instr->temp());
2324
2325 if (rep.IsDouble()) {
2326 XMMRegister value = ToDoubleRegister(instr->value());
2327 XMMRegister xmm_scratch = double_scratch0();
2328 __ xorps(xmm_scratch, xmm_scratch);
2329 __ ucomisd(xmm_scratch, value);
2330 EmitFalseBranch(instr, not_equal);
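    // The value compares equal to zero, so movmskpd's sign bit distinguishes
    // -0 (branch taken) from +0.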
2331 __ movmskpd(scratch, value);
2332 __ test(scratch, Immediate(1));
2333 EmitBranch(instr, not_zero);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002334 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002335 Register value = ToRegister(instr->value());
2336 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
2337 __ CheckMap(value, map, instr->FalseLabel(chunk()), DO_SMI_CHECK);
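    // The upper word of a heap-number -0 is 0x80000000, the only value for
    // which subtracting 1 sets the overflow flag; the mantissa check then
    // confirms that the lower word is zero.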
2338 __ cmp(FieldOperand(value, HeapNumber::kExponentOffset),
2339 Immediate(0x1));
2340 EmitFalseBranch(instr, no_overflow);
2341 __ cmp(FieldOperand(value, HeapNumber::kMantissaOffset),
2342 Immediate(0x00000000));
2343 EmitBranch(instr, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002344 }
2345}
2346
2347
2348Condition LCodeGen::EmitIsObject(Register input,
2349 Register temp1,
Ben Murdochb0fe1622011-05-05 13:52:32 +01002350 Label* is_not_object,
2351 Label* is_object) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002352 __ JumpIfSmi(input, is_not_object);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002353
Steve Block44f0eee2011-05-26 01:26:41 +01002354 __ cmp(input, isolate()->factory()->null_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002355 __ j(equal, is_object);
2356
2357 __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
2358 // Undetectable objects behave like undefined.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002359 __ test_b(FieldOperand(temp1, Map::kBitFieldOffset),
2360 1 << Map::kIsUndetectable);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002361 __ j(not_zero, is_not_object);
2362
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002363 __ movzx_b(temp1, FieldOperand(temp1, Map::kInstanceTypeOffset));
2364 __ cmp(temp1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002365 __ j(below, is_not_object);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002366 __ cmp(temp1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002367 return below_equal;
2368}
2369
2370
Ben Murdochb0fe1622011-05-05 13:52:32 +01002371void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002372 Register reg = ToRegister(instr->value());
2373 Register temp = ToRegister(instr->temp());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002374
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002375 Condition true_cond = EmitIsObject(
2376 reg, temp, instr->FalseLabel(chunk_), instr->TrueLabel(chunk_));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002377
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002378 EmitBranch(instr, true_cond);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002379}
2380
2381
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002382Condition LCodeGen::EmitIsString(Register input,
2383 Register temp1,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002384 Label* is_not_string,
2385 SmiCheck check_needed = INLINE_SMI_CHECK) {
2386 if (check_needed == INLINE_SMI_CHECK) {
2387 __ JumpIfSmi(input, is_not_string);
2388 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002389
2390 Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
2391
2392 return cond;
2393}
2394
2395
2396void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002397 Register reg = ToRegister(instr->value());
2398 Register temp = ToRegister(instr->temp());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002399
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002400 SmiCheck check_needed =
2401 instr->hydrogen()->value()->type().IsHeapObject()
2402 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002403
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002404 Condition true_cond = EmitIsString(
2405 reg, temp, instr->FalseLabel(chunk_), check_needed);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002406
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002407 EmitBranch(instr, true_cond);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002408}
2409
2410
Ben Murdochb0fe1622011-05-05 13:52:32 +01002411void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002412 Operand input = ToOperand(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002413
2414 __ test(input, Immediate(kSmiTagMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002415 EmitBranch(instr, zero);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002416}
2417
2418
Ben Murdoch257744e2011-11-30 15:57:28 +00002419void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002420 Register input = ToRegister(instr->value());
2421 Register temp = ToRegister(instr->temp());
Ben Murdoch257744e2011-11-30 15:57:28 +00002422
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002423 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2424 STATIC_ASSERT(kSmiTag == 0);
2425 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2426 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002427 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
2428 __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
2429 1 << Map::kIsUndetectable);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002430 EmitBranch(instr, not_zero);
Ben Murdoch257744e2011-11-30 15:57:28 +00002431}
2432
2433
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002434static Condition ComputeCompareCondition(Token::Value op) {
2435 switch (op) {
2436 case Token::EQ_STRICT:
2437 case Token::EQ:
2438 return equal;
2439 case Token::LT:
2440 return less;
2441 case Token::GT:
2442 return greater;
2443 case Token::LTE:
2444 return less_equal;
2445 case Token::GTE:
2446 return greater_equal;
2447 default:
2448 UNREACHABLE();
2449 return no_condition;
2450 }
2451}
2452
2453
2454void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2455 Token::Value op = instr->op();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002456
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002457 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002458 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2459
2460 Condition condition = ComputeCompareCondition(op);
2461 __ test(eax, Operand(eax));
2462
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002463 EmitBranch(instr, condition);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002464}
2465
2466
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002467static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002468 InstanceType from = instr->from();
2469 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002470 if (from == FIRST_TYPE) return to;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002471 DCHECK(from == to || to == LAST_TYPE);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002472 return from;
2473}
2474
2475
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002476static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) {
Ben Murdochb8e0da22011-05-16 14:20:40 +01002477 InstanceType from = instr->from();
2478 InstanceType to = instr->to();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002479 if (from == to) return equal;
2480 if (to == LAST_TYPE) return above_equal;
2481 if (from == FIRST_TYPE) return below_equal;
2482 UNREACHABLE();
2483 return equal;
2484}
2485
2486
Ben Murdochb0fe1622011-05-05 13:52:32 +01002487void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002488 Register input = ToRegister(instr->value());
2489 Register temp = ToRegister(instr->temp());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002490
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002491 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2492 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2493 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002494
Ben Murdochb8e0da22011-05-16 14:20:40 +01002495 __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002496 EmitBranch(instr, BranchCondition(instr->hydrogen()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002497}
2498
2499
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002500void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002501 Register input = ToRegister(instr->value());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002502 Register result = ToRegister(instr->result());
2503
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002504 __ AssertString(input);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002505
2506 __ mov(result, FieldOperand(input, String::kHashFieldOffset));
2507 __ IndexFromHash(result, result);
2508}
2509
2510
Ben Murdochb0fe1622011-05-05 13:52:32 +01002511void LCodeGen::DoHasCachedArrayIndexAndBranch(
2512 LHasCachedArrayIndexAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002513 Register input = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002514
2515 __ test(FieldOperand(input, String::kHashFieldOffset),
2516 Immediate(String::kContainsCachedArrayIndexMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002517 EmitBranch(instr, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002518}
2519
2520
2521// Branches to a label or falls through with the answer in the z flag. Trashes
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002522// the temp registers, but not the input.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002523void LCodeGen::EmitClassOfTest(Label* is_true,
2524 Label* is_false,
2525                                Handle<String> class_name,
2526 Register input,
2527 Register temp,
2528 Register temp2) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002529 DCHECK(!input.is(temp));
2530 DCHECK(!input.is(temp2));
2531 DCHECK(!temp.is(temp2));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002532 __ JumpIfSmi(input, is_false);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002533
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002534 if (String::Equals(isolate()->factory()->Function_string(), class_name)) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002535 // Assuming the following assertions, we can use the same compares to test
2536 // for both being a function type and being in the object type range.
2537 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2538 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2539 FIRST_SPEC_OBJECT_TYPE + 1);
2540 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2541 LAST_SPEC_OBJECT_TYPE - 1);
2542 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
2543 __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, temp);
2544 __ j(below, is_false);
2545 __ j(equal, is_true);
2546 __ CmpInstanceType(temp, LAST_SPEC_OBJECT_TYPE);
2547 __ j(equal, is_true);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002548 } else {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002549 // Faster code path to avoid two compares: subtract lower bound from the
2550 // actual type and do a signed compare with the width of the type range.
2551 __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
2552 __ movzx_b(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
2553 __ sub(Operand(temp2), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2554 __ cmp(Operand(temp2), Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
2555 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2556 __ j(above, is_false);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002557 }
2558
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002559 // Now we are in the FIRST-LAST_NONCALLABLE_SPEC_OBJECT_TYPE range.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002560 // Check if the constructor in the map is a function.
2561 __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002562 // Objects with a non-function constructor have class 'Object'.
2563 __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002564 if (String::Equals(class_name, isolate()->factory()->Object_string())) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002565 __ j(not_equal, is_true);
2566 } else {
2567 __ j(not_equal, is_false);
2568 }
2569
2570 // temp now contains the constructor function. Grab the
2571 // instance class name from there.
2572 __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
2573 __ mov(temp, FieldOperand(temp,
2574 SharedFunctionInfo::kInstanceClassNameOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002575 // The class name we are testing against is internalized since it's a literal.
2576 // The name in the constructor is internalized because of the way the context
2577 // is booted. This routine isn't expected to work for random API-created
Ben Murdochb0fe1622011-05-05 13:52:32 +01002578 // classes and it doesn't have to because you can't access it with natives
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002579 // syntax. Since both sides are internalized it is sufficient to use an
2580 // identity comparison.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002581 __ cmp(temp, class_name);
2582 // End with the answer in the z flag.
2583}
2584
2585
Ben Murdochb0fe1622011-05-05 13:52:32 +01002586void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002587 Register input = ToRegister(instr->value());
2588 Register temp = ToRegister(instr->temp());
2589 Register temp2 = ToRegister(instr->temp2());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002590
Ben Murdochb0fe1622011-05-05 13:52:32 +01002591 Handle<String> class_name = instr->hydrogen()->class_name();
2592
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002593 EmitClassOfTest(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_),
2594 class_name, input, temp, temp2);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002595
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002596 EmitBranch(instr, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002597}
2598
2599
2600void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002601 Register reg = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002602 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002603 EmitBranch(instr, equal);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002604}
2605
2606
2607void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002608 // Object and function are in fixed registers defined by the stub.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002609 DCHECK(ToRegister(instr->context()).is(esi));
2610 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002611 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002612
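  // The stub leaves its answer in eax; by convention a zero value appears to
  // mean "is an instance", which is why the zero case selects true_value.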
Ben Murdoch257744e2011-11-30 15:57:28 +00002613 Label true_value, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002614 __ test(eax, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00002615 __ j(zero, &true_value, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002616 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00002617 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002618 __ bind(&true_value);
Steve Block44f0eee2011-05-26 01:26:41 +01002619 __ mov(ToRegister(instr->result()), factory()->true_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002620 __ bind(&done);
2621}
2622
2623
Ben Murdoch086aeea2011-05-13 15:57:08 +01002624void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002625 class DeferredInstanceOfKnownGlobal FINAL : public LDeferredCode {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002626 public:
2627 DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
2628 LInstanceOfKnownGlobal* instr)
2629 : LDeferredCode(codegen), instr_(instr) { }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002630 void Generate() OVERRIDE {
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002631 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002632 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002633 LInstruction* instr() OVERRIDE { return instr_; }
Ben Murdoch086aeea2011-05-13 15:57:08 +01002634 Label* map_check() { return &map_check_; }
Ben Murdoch086aeea2011-05-13 15:57:08 +01002635 private:
2636 LInstanceOfKnownGlobal* instr_;
2637 Label map_check_;
2638 };
2639
2640 DeferredInstanceOfKnownGlobal* deferred;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002641 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002642
2643 Label done, false_result;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002644 Register object = ToRegister(instr->value());
2645 Register temp = ToRegister(instr->temp());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002646
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002647 // A Smi is not an instance of anything.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002648 __ JumpIfSmi(object, &false_result, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002649
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002650 // This is the inlined call site instanceof cache. The two occurrences of the
Ben Murdoch086aeea2011-05-13 15:57:08 +01002651 // hole value will be patched to the last map/result pair generated by the
2652 // instanceof stub.
Ben Murdoch257744e2011-11-30 15:57:28 +00002653 Label cache_miss;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002654 Register map = ToRegister(instr->temp());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002655 __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
2656 __ bind(deferred->map_check()); // Label for calculating code patching.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002657 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
2658 __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map.
Ben Murdoch257744e2011-11-30 15:57:28 +00002659 __ j(not_equal, &cache_miss, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002660 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002661 __ jmp(&done, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002662
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002663 // The inlined call site cache did not match. Check for null and string
2664 // before calling the deferred code.
Ben Murdoch086aeea2011-05-13 15:57:08 +01002665 __ bind(&cache_miss);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002666 // Null is not an instance of anything.
Steve Block44f0eee2011-05-26 01:26:41 +01002667 __ cmp(object, factory()->null_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002668 __ j(equal, &false_result, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002669
2670 // String values are not instances of anything.
2671 Condition is_string = masm_->IsObjectStringType(object, temp, temp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002672 __ j(is_string, &false_result, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002673
2674 // Go to the deferred code.
2675 __ jmp(deferred->entry());
2676
2677 __ bind(&false_result);
Steve Block44f0eee2011-05-26 01:26:41 +01002678 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002679
2680 // Here result has either true or false. Deferred code also produces true or
2681 // false object.
2682 __ bind(deferred->exit());
2683 __ bind(&done);
2684}
2685
2686
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002687void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
2688 Label* map_check) {
Ben Murdoch8b112d22011-06-08 16:22:53 +01002689 PushSafepointRegistersScope scope(this);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002690
2691 InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
2692 flags = static_cast<InstanceofStub::Flags>(
2693 flags | InstanceofStub::kArgsInRegisters);
2694 flags = static_cast<InstanceofStub::Flags>(
2695 flags | InstanceofStub::kCallSiteInlineCheck);
2696 flags = static_cast<InstanceofStub::Flags>(
2697 flags | InstanceofStub::kReturnTrueFalseObject);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002698 InstanceofStub stub(isolate(), flags);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002699
Ben Murdoch8b112d22011-06-08 16:22:53 +01002700 // Get the temp register reserved by the instruction. This needs to be a
2701 // register which is pushed last by PushSafepointRegisters, because the top
2702 // of the stack is used to pass the offset of the map check location to
2703 // the stub.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002704 Register temp = ToRegister(instr->temp());
2705 DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002706 __ LoadHeapObject(InstanceofStub::right(), instr->function());
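  // The stub needs the distance from the inlined map check to this call so it
  // can patch the cached map and cached result at the call site; the offset is
  // passed via the safepoint slot of |temp|. kAdditionalDelta presumably
  // accounts for the instructions emitted between here and the actual call.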
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002707 static const int kAdditionalDelta = 13;
Ben Murdoch086aeea2011-05-13 15:57:08 +01002708 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
Ben Murdoch086aeea2011-05-13 15:57:08 +01002709 __ mov(temp, Immediate(delta));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002710 __ StoreToSafepointRegisterSlot(temp, temp);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002711 CallCodeGeneric(stub.GetCode(),
2712 RelocInfo::CODE_TARGET,
2713 instr,
Ben Murdoch8b112d22011-06-08 16:22:53 +01002714 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002715 // Get the deoptimization index of the LLazyBailout-environment that
2716 // corresponds to this instruction.
2717 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
Ben Murdoch2b4ba112012-01-20 14:57:15 +00002718 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2719
Ben Murdoch086aeea2011-05-13 15:57:08 +01002720 // Put the result value into the eax slot and restore all registers.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002721 __ StoreToSafepointRegisterSlot(eax, eax);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002722}
2723
2724
Ben Murdochb0fe1622011-05-05 13:52:32 +01002725void LCodeGen::DoCmpT(LCmpT* instr) {
2726 Token::Value op = instr->op();
2727
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002728 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002729 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002730
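  // The compare IC is assumed to leave a strcmp-style value in eax, so testing
  // it against zero with the token's condition yields the boolean answer.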
2731 Condition condition = ComputeCompareCondition(op);
Ben Murdoch257744e2011-11-30 15:57:28 +00002732 Label true_value, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002733 __ test(eax, Operand(eax));
Ben Murdoch257744e2011-11-30 15:57:28 +00002734 __ j(condition, &true_value, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01002735 __ mov(ToRegister(instr->result()), factory()->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00002736 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002737 __ bind(&true_value);
Steve Block44f0eee2011-05-26 01:26:41 +01002738 __ mov(ToRegister(instr->result()), factory()->true_value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01002739 __ bind(&done);
2740}
2741
2742
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002743void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
2744 int extra_value_count = dynamic_frame_alignment ? 2 : 1;
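  // Besides the parameters, the stack still holds the return address and, for
  // a dynamically aligned frame, one extra padding word: hence 1 or 2.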
2745
2746 if (instr->has_constant_parameter_count()) {
2747 int parameter_count = ToInteger32(instr->constant_parameter_count());
2748 if (dynamic_frame_alignment && FLAG_debug_code) {
2749 __ cmp(Operand(esp,
2750 (parameter_count + extra_value_count) * kPointerSize),
2751 Immediate(kAlignmentZapValue));
2752 __ Assert(equal, kExpectedAlignmentMarker);
2753 }
2754 __ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
2755 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002756 DCHECK(info()->IsStub()); // Functions would need to drop one more value.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002757 Register reg = ToRegister(instr->parameter_count());
2758 // The argument count parameter is a smi
2759 __ SmiUntag(reg);
2760 Register return_addr_reg = reg.is(ecx) ? ebx : ecx;
2761 if (dynamic_frame_alignment && FLAG_debug_code) {
2762 DCHECK(extra_value_count == 2);
2763 __ cmp(Operand(esp, reg, times_pointer_size,
2764 extra_value_count * kPointerSize),
2765 Immediate(kAlignmentZapValue));
2766 __ Assert(equal, kExpectedAlignmentMarker);
2767 }
2768
2769 // emit code to restore stack based on instr->parameter_count()
2770 __ pop(return_addr_reg); // save return address
2771 if (dynamic_frame_alignment) {
2772 __ inc(reg); // 1 more for alignment
2773 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002774
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002775 __ shl(reg, kPointerSizeLog2);
2776 __ add(esp, reg);
2777 __ jmp(return_addr_reg);
2778 }
2779}
2780
2781
Ben Murdochb0fe1622011-05-05 13:52:32 +01002782void LCodeGen::DoReturn(LReturn* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002783 if (FLAG_trace && info()->IsOptimizing()) {
Steve Block1e0659c2011-05-24 12:43:12 +01002784 // Preserve the return value on the stack and rely on the runtime call
2785 // to return the value in the same register. We're leaving the code
2786 // managed by the register allocator and tearing down the frame, it's
2787 // safe to write to the context register.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002788 __ push(eax);
Steve Block1e0659c2011-05-24 12:43:12 +01002789 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002790 __ CallRuntime(Runtime::kTraceExit, 1);
2791 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002792 if (info()->saves_caller_doubles()) RestoreCallerDoubles();
2793 if (dynamic_frame_alignment_) {
2794 // Fetch the state of the dynamic frame alignment.
2795 __ mov(edx, Operand(ebp,
2796 JavaScriptFrameConstants::kDynamicAlignmentStateOffset));
2797 }
2798 int no_frame_start = -1;
2799 if (NeedsEagerFrame()) {
2800 __ mov(esp, ebp);
2801 __ pop(ebp);
2802 no_frame_start = masm_->pc_offset();
2803 }
2804 if (dynamic_frame_alignment_) {
2805 Label no_padding;
2806 __ cmp(edx, Immediate(kNoAlignmentPadding));
2807 __ j(equal, &no_padding, Label::kNear);
2808
2809 EmitReturn(instr, true);
2810 __ bind(&no_padding);
2811 }
2812
2813 EmitReturn(instr, false);
2814 if (no_frame_start != -1) {
2815 info()->AddNoFrameRange(no_frame_start, masm_->pc_offset());
2816 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01002817}
2818
2819
Ben Murdoch8b112d22011-06-08 16:22:53 +01002820void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002821 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002822 __ mov(result, Operand::ForCell(instr->hydrogen()->cell().handle()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002823 if (instr->hydrogen()->RequiresHoleCheck()) {
Steve Block44f0eee2011-05-26 01:26:41 +01002824 __ cmp(result, factory()->the_hole_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002825 DeoptimizeIf(equal, instr, "hole");
Ben Murdochb0fe1622011-05-05 13:52:32 +01002826 }
2827}
2828
2829
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002830template <class T>
2831void LCodeGen::EmitVectorLoadICRegisters(T* instr) {
2832 DCHECK(FLAG_vector_ics);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002833 Register vector_register = ToRegister(instr->temp_vector());
2834 Register slot_register = VectorLoadICDescriptor::SlotRegister();
2835 DCHECK(vector_register.is(VectorLoadICDescriptor::VectorRegister()));
2836 DCHECK(slot_register.is(eax));
2837
2838 AllowDeferredHandleDereference vector_structure_check;
2839 Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
2840 __ mov(vector_register, vector);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002841 // No need to allocate this register.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002842 FeedbackVectorICSlot slot = instr->hydrogen()->slot();
2843 int index = vector->GetIndex(slot);
2844 __ mov(slot_register, Immediate(Smi::FromInt(index)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002845}
Ben Murdoch8b112d22011-06-08 16:22:53 +01002846
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002847
2848void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2849 DCHECK(ToRegister(instr->context()).is(esi));
2850 DCHECK(ToRegister(instr->global_object())
2851 .is(LoadDescriptor::ReceiverRegister()));
2852 DCHECK(ToRegister(instr->result()).is(eax));
2853
2854 __ mov(LoadDescriptor::NameRegister(), instr->name());
2855 if (FLAG_vector_ics) {
2856 EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
2857 }
2858 ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002859 Handle<Code> ic = CodeFactory::LoadICInOptimizedCode(isolate(), mode).code();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002860 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002861}
2862
2863
2864void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002865 Register value = ToRegister(instr->value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002866 Handle<PropertyCell> cell_handle = instr->hydrogen()->cell().handle();
Steve Block1e0659c2011-05-24 12:43:12 +01002867
2868 // If the cell we are storing to contains the hole it could have
2869 // been deleted from the property dictionary. In that case, the property
2870 // details in the property dictionary would have to be updated to mark it
2871 // as no longer deleted, so we simply deoptimize.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002872 if (instr->hydrogen()->RequiresHoleCheck()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002873 __ cmp(Operand::ForCell(cell_handle), factory()->the_hole_value());
2874 DeoptimizeIf(equal, instr, "hole");
Steve Block1e0659c2011-05-24 12:43:12 +01002875 }
2876
2877 // Store the value.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002878 __ mov(Operand::ForCell(cell_handle), value);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002879 // Cells are always rescanned, so no write barrier here.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002880}
2881
2882
Ben Murdochb8e0da22011-05-16 14:20:40 +01002883void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01002884 Register context = ToRegister(instr->context());
Ben Murdochb8e0da22011-05-16 14:20:40 +01002885 Register result = ToRegister(instr->result());
Steve Block1e0659c2011-05-24 12:43:12 +01002886 __ mov(result, ContextOperand(context, instr->slot_index()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002887
2888 if (instr->hydrogen()->RequiresHoleCheck()) {
2889 __ cmp(result, factory()->the_hole_value());
2890 if (instr->hydrogen()->DeoptimizesOnHole()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002891 DeoptimizeIf(equal, instr, "hole");
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002892 } else {
2893 Label is_not_hole;
2894 __ j(not_equal, &is_not_hole, Label::kNear);
2895 __ mov(result, factory()->undefined_value());
2896 __ bind(&is_not_hole);
2897 }
2898 }
Steve Block1e0659c2011-05-24 12:43:12 +01002899}
2900
2901
2902void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2903 Register context = ToRegister(instr->context());
2904 Register value = ToRegister(instr->value());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002905
2906 Label skip_assignment;
2907
2908 Operand target = ContextOperand(context, instr->slot_index());
2909 if (instr->hydrogen()->RequiresHoleCheck()) {
2910 __ cmp(target, factory()->the_hole_value());
2911 if (instr->hydrogen()->DeoptimizesOnHole()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002912 DeoptimizeIf(equal, instr, "hole");
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002913 } else {
2914 __ j(not_equal, &skip_assignment, Label::kNear);
2915 }
2916 }
2917
2918 __ mov(target, value);
2919 if (instr->hydrogen()->NeedsWriteBarrier()) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002920 SmiCheck check_needed =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002921 instr->hydrogen()->value()->type().IsHeapObject()
2922 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
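    // If the value is statically known to be a heap object, the write barrier
    // below can omit its smi check.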
2923 Register temp = ToRegister(instr->temp());
Steve Block1e0659c2011-05-24 12:43:12 +01002924 int offset = Context::SlotOffset(instr->slot_index());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002925 __ RecordWriteContextSlot(context,
2926 offset,
2927 value,
2928 temp,
2929 kSaveFPRegs,
2930 EMIT_REMEMBERED_SET,
2931 check_needed);
Steve Block1e0659c2011-05-24 12:43:12 +01002932 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002933
2934 __ bind(&skip_assignment);
Ben Murdochb8e0da22011-05-16 14:20:40 +01002935}
2936
2937
Ben Murdochb0fe1622011-05-05 13:52:32 +01002938void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002939 HObjectAccess access = instr->hydrogen()->access();
2940 int offset = access.offset();
2941
2942 if (access.IsExternalMemory()) {
2943 Register result = ToRegister(instr->result());
2944 MemOperand operand = instr->object()->IsConstantOperand()
2945 ? MemOperand::StaticVariable(ToExternalReference(
2946 LConstantOperand::cast(instr->object())))
2947 : MemOperand(ToRegister(instr->object()), offset);
2948 __ Load(result, operand, access.representation());
2949 return;
2950 }
2951
Steve Block44f0eee2011-05-26 01:26:41 +01002952 Register object = ToRegister(instr->object());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002953 if (instr->hydrogen()->representation().IsDouble()) {
2954 XMMRegister result = ToDoubleRegister(instr->result());
2955 __ movsd(result, FieldOperand(object, offset));
2956 return;
2957 }
2958
Ben Murdochb0fe1622011-05-05 13:52:32 +01002959 Register result = ToRegister(instr->result());
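  // For out-of-object fields the offset is relative to the properties backing
  // store, so load that array first and read the field from it.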
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002960 if (!access.IsInobject()) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002961 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002962 object = result;
Ben Murdochb0fe1622011-05-05 13:52:32 +01002963 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002964 __ Load(result, FieldOperand(object, offset), access.representation());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002965}
2966
2967
2968void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002969 DCHECK(!operand->IsDoubleRegister());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002970 if (operand->IsConstantOperand()) {
2971 Handle<Object> object = ToHandle(LConstantOperand::cast(operand));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002972 AllowDeferredHandleDereference smi_check;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01002973 if (object->IsSmi()) {
2974 __ Push(Handle<Smi>::cast(object));
2975 } else {
2976 __ PushHeapObject(Handle<HeapObject>::cast(object));
2977 }
2978 } else if (operand->IsRegister()) {
2979 __ push(ToRegister(operand));
2980 } else {
2981 __ push(ToOperand(operand));
Steve Block44f0eee2011-05-26 01:26:41 +01002982 }
2983}
2984
2985
Ben Murdochb0fe1622011-05-05 13:52:32 +01002986void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002987 DCHECK(ToRegister(instr->context()).is(esi));
2988 DCHECK(ToRegister(instr->object()).is(LoadDescriptor::ReceiverRegister()));
2989 DCHECK(ToRegister(instr->result()).is(eax));
Ben Murdochb0fe1622011-05-05 13:52:32 +01002990
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002991 __ mov(LoadDescriptor::NameRegister(), instr->name());
2992 if (FLAG_vector_ics) {
2993 EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
2994 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04002995 Handle<Code> ic =
2996 CodeFactory::LoadICInOptimizedCode(isolate(), NOT_CONTEXTUAL).code();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00002997 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01002998}
2999
3000
Steve Block9fac8402011-05-12 15:51:54 +01003001void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
3002 Register function = ToRegister(instr->function());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003003 Register temp = ToRegister(instr->temp());
Steve Block9fac8402011-05-12 15:51:54 +01003004 Register result = ToRegister(instr->result());
3005
Steve Block9fac8402011-05-12 15:51:54 +01003006 // Get the prototype or initial map from the function.
3007 __ mov(result,
3008 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
3009
3010 // Check that the function has a prototype or an initial map.
Steve Block44f0eee2011-05-26 01:26:41 +01003011 __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003012 DeoptimizeIf(equal, instr, "hole");
Steve Block9fac8402011-05-12 15:51:54 +01003013
3014 // If the function does not have an initial map, we're done.
Ben Murdoch257744e2011-11-30 15:57:28 +00003015 Label done;
Steve Block9fac8402011-05-12 15:51:54 +01003016 __ CmpObjectType(result, MAP_TYPE, temp);
Ben Murdoch257744e2011-11-30 15:57:28 +00003017 __ j(not_equal, &done, Label::kNear);
Steve Block9fac8402011-05-12 15:51:54 +01003018
3019 // Get the prototype from the initial map.
3020 __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
Steve Block9fac8402011-05-12 15:51:54 +01003021
3022 // All done.
3023 __ bind(&done);
3024}
3025
3026
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003027void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
Steve Block1e0659c2011-05-24 12:43:12 +01003028 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003029 __ LoadRoot(result, instr->index());
Steve Block1e0659c2011-05-24 12:43:12 +01003030}
3031
3032
Ben Murdochb0fe1622011-05-05 13:52:32 +01003033void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
3034 Register arguments = ToRegister(instr->arguments());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003035 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003036 if (instr->length()->IsConstantOperand() &&
3037 instr->index()->IsConstantOperand()) {
3038 int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3039 int const_length = ToInteger32(LConstantOperand::cast(instr->length()));
3040 int index = (const_length - const_index) + 1;
3041 __ mov(result, Operand(arguments, index * kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00003042 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003043 Register length = ToRegister(instr->length());
3044 Operand index = ToOperand(instr->index());
3045 // There are two words between the frame pointer and the last argument.
3046 // Subtracting the index from the length accounts for one of them; add one more.
3047 __ sub(length, index);
3048 __ mov(result, Operand(arguments, length, times_4, kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00003049 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003050}
3051
3052
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003053void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
Ben Murdoch589d6972011-11-30 16:04:58 +00003054 ElementsKind elements_kind = instr->elements_kind();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003055 LOperand* key = instr->key();
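  // The key is untagged in place when the smi tag cannot simply be folded into
  // the scaled-index addressing mode for this element size (an assumption
  // about what ExternalArrayOpRequiresTemp checks).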
3056 if (!key->IsConstantOperand() &&
3057 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
3058 elements_kind)) {
3059 __ SmiUntag(ToRegister(key));
3060 }
3061 Operand operand(BuildFastArrayOperand(
3062 instr->elements(),
3063 key,
3064 instr->hydrogen()->key()->representation(),
3065 elements_kind,
3066 instr->base_offset()));
3067 if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
3068 elements_kind == FLOAT32_ELEMENTS) {
Steve Block44f0eee2011-05-26 01:26:41 +01003069 XMMRegister result(ToDoubleRegister(instr->result()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003070 __ movss(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01003071 __ cvtss2sd(result, result);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003072 } else if (elements_kind == EXTERNAL_FLOAT64_ELEMENTS ||
3073 elements_kind == FLOAT64_ELEMENTS) {
3074 __ movsd(ToDoubleRegister(instr->result()), operand);
Steve Block44f0eee2011-05-26 01:26:41 +01003075 } else {
3076 Register result(ToRegister(instr->result()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003077 switch (elements_kind) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003078 case EXTERNAL_INT8_ELEMENTS:
3079 case INT8_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003080 __ movsx_b(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01003081 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003082 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
3083 case EXTERNAL_UINT8_ELEMENTS:
3084 case UINT8_ELEMENTS:
3085 case UINT8_CLAMPED_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003086 __ movzx_b(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01003087 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003088 case EXTERNAL_INT16_ELEMENTS:
3089 case INT16_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003090 __ movsx_w(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01003091 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003092 case EXTERNAL_UINT16_ELEMENTS:
3093 case UINT16_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003094 __ movzx_w(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01003095 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003096 case EXTERNAL_INT32_ELEMENTS:
3097 case INT32_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003098 __ mov(result, operand);
Steve Block44f0eee2011-05-26 01:26:41 +01003099 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003100 case EXTERNAL_UINT32_ELEMENTS:
3101 case UINT32_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00003102 __ mov(result, operand);
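        // Without the kUint32 flag the result will be used as a signed int32,
        // so values with the top bit set cannot be represented and we deopt.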
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003103 if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
3104 __ test(result, Operand(result));
3105 DeoptimizeIf(negative, instr, "negative value");
3106 }
Steve Block44f0eee2011-05-26 01:26:41 +01003107 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003108 case EXTERNAL_FLOAT32_ELEMENTS:
3109 case EXTERNAL_FLOAT64_ELEMENTS:
3110 case FLOAT32_ELEMENTS:
3111 case FLOAT64_ELEMENTS:
3112 case FAST_SMI_ELEMENTS:
Ben Murdoch589d6972011-11-30 16:04:58 +00003113 case FAST_ELEMENTS:
3114 case FAST_DOUBLE_ELEMENTS:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003115 case FAST_HOLEY_SMI_ELEMENTS:
3116 case FAST_HOLEY_ELEMENTS:
3117 case FAST_HOLEY_DOUBLE_ELEMENTS:
Ben Murdoch589d6972011-11-30 16:04:58 +00003118 case DICTIONARY_ELEMENTS:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003119 case SLOPPY_ARGUMENTS_ELEMENTS:
Steve Block44f0eee2011-05-26 01:26:41 +01003120 UNREACHABLE();
3121 break;
3122 }
3123 }
Steve Block1e0659c2011-05-24 12:43:12 +01003124}
3125
3126
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003127void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
3128 if (instr->hydrogen()->RequiresHoleCheck()) {
3129 Operand hole_check_operand = BuildFastArrayOperand(
3130 instr->elements(), instr->key(),
3131 instr->hydrogen()->key()->representation(),
3132 FAST_DOUBLE_ELEMENTS,
3133 instr->base_offset() + sizeof(kHoleNanLower32));
3134 __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
3135 DeoptimizeIf(equal, instr, "hole");
3136 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003137
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003138 Operand double_load_operand = BuildFastArrayOperand(
3139 instr->elements(),
3140 instr->key(),
3141 instr->hydrogen()->key()->representation(),
3142 FAST_DOUBLE_ELEMENTS,
3143 instr->base_offset());
3144 XMMRegister result = ToDoubleRegister(instr->result());
3145 __ movsd(result, double_load_operand);
3146}
3147
3148
3149void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
3150 Register result = ToRegister(instr->result());
3151
3152 // Load the result.
3153 __ mov(result,
3154 BuildFastArrayOperand(instr->elements(), instr->key(),
3155 instr->hydrogen()->key()->representation(),
3156 FAST_ELEMENTS, instr->base_offset()));
3157
3158 // Check for the hole value.
3159 if (instr->hydrogen()->RequiresHoleCheck()) {
3160 if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
3161 __ test(result, Immediate(kSmiTagMask));
3162 DeoptimizeIf(not_equal, instr, "not a Smi");
3163 } else {
3164 __ cmp(result, factory()->the_hole_value());
3165 DeoptimizeIf(equal, instr, "hole");
3166 }
3167 }
3168}
3169
3170
3171void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
3172 if (instr->is_typed_elements()) {
3173 DoLoadKeyedExternalArray(instr);
3174 } else if (instr->hydrogen()->representation().IsDouble()) {
3175 DoLoadKeyedFixedDoubleArray(instr);
3176 } else {
3177 DoLoadKeyedFixedArray(instr);
3178 }
3179}
3180
3181
3182Operand LCodeGen::BuildFastArrayOperand(
3183 LOperand* elements_pointer,
3184 LOperand* key,
3185 Representation key_representation,
3186 ElementsKind elements_kind,
3187 uint32_t base_offset) {
3188 Register elements_pointer_reg = ToRegister(elements_pointer);
3189 int element_shift_size = ElementsKindToShiftSize(elements_kind);
3190 int shift_size = element_shift_size;
3191 if (key->IsConstantOperand()) {
3192 int constant_value = ToInteger32(LConstantOperand::cast(key));
3193 if (constant_value & 0xF0000000) {
3194 Abort(kArrayIndexConstantValueTooBig);
3195 }
3196 return Operand(elements_pointer_reg,
3197 ((constant_value) << shift_size)
3198 + base_offset);
3199 } else {
3200 // Take the tag bit into account while computing the shift size.
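    // A smi key is the integer value shifted left by the tag size, so dropping
    // one from the scale factor compensates for the tag.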
3201 if (key_representation.IsSmi() && (shift_size >= 1)) {
3202 shift_size -= kSmiTagSize;
3203 }
3204 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
3205 return Operand(elements_pointer_reg,
3206 ToRegister(key),
3207 scale_factor,
3208 base_offset);
3209 }
3210}
3211
3212
3213void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3214 DCHECK(ToRegister(instr->context()).is(esi));
3215 DCHECK(ToRegister(instr->object()).is(LoadDescriptor::ReceiverRegister()));
3216 DCHECK(ToRegister(instr->key()).is(LoadDescriptor::NameRegister()));
3217
3218 if (FLAG_vector_ics) {
3219 EmitVectorLoadICRegisters<LLoadKeyedGeneric>(instr);
3220 }
3221
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003222 Handle<Code> ic = CodeFactory::KeyedLoadICInOptimizedCode(isolate()).code();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003223 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003224}
3225
3226
3227void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3228 Register result = ToRegister(instr->result());
3229
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003230 if (instr->hydrogen()->from_inlined()) {
3231 __ lea(result, Operand(esp, -2 * kPointerSize));
3232 } else {
3233 // Check for arguments adapter frame.
3234 Label done, adapted;
3235 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3236 __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
3237 __ cmp(Operand(result),
3238 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3239 __ j(equal, &adapted, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003240
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003241 // No arguments adaptor frame.
3242 __ mov(result, Operand(ebp));
3243 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003244
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003245 // Arguments adaptor frame present.
3246 __ bind(&adapted);
3247 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003248
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003249 // Result is the frame pointer for the frame if not adapted and for the real
3250 // frame below the adaptor frame if adapted.
3251 __ bind(&done);
3252 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003253}
3254
3255
3256void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003257 Operand elem = ToOperand(instr->elements());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003258 Register result = ToRegister(instr->result());
3259
Ben Murdoch257744e2011-11-30 15:57:28 +00003260 Label done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003261
Ben Murdoch086aeea2011-05-13 15:57:08 +01003262 // If there is no arguments adaptor frame, the number of arguments is fixed.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003263 __ cmp(ebp, elem);
3264 __ mov(result, Immediate(scope()->num_parameters()));
Ben Murdoch257744e2011-11-30 15:57:28 +00003265 __ j(equal, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003266
3267 // Arguments adaptor frame present. Get argument length from there.
3268 __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3269 __ mov(result, Operand(result,
3270 ArgumentsAdaptorFrameConstants::kLengthOffset));
3271 __ SmiUntag(result);
3272
Ben Murdoch086aeea2011-05-13 15:57:08 +01003273 // Argument length is in result register.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003274 __ bind(&done);
3275}
3276
3277
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003278void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003279 Register receiver = ToRegister(instr->receiver());
Steve Block1e0659c2011-05-24 12:43:12 +01003280 Register function = ToRegister(instr->function());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003281
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003282 // If the receiver is null or undefined, we have to pass the global
3283 // object as a receiver to normal functions. Values have to be
3284 // passed unchanged to builtins and strict-mode functions.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003285 Label receiver_ok, global_object;
3286 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
3287 Register scratch = ToRegister(instr->temp());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003288
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003289 if (!instr->hydrogen()->known_function()) {
3290 // Do not transform the receiver to object for strict mode
3291 // functions.
3292 __ mov(scratch,
3293 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
3294 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
3295 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
3296 __ j(not_equal, &receiver_ok, dist);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003297
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003298 // Do not transform the receiver to object for builtins.
3299 __ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
3300 1 << SharedFunctionInfo::kNativeBitWithinByte);
3301 __ j(not_equal, &receiver_ok, dist);
3302 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003303
3304 // Normal function. Replace undefined or null with global receiver.
Steve Block44f0eee2011-05-26 01:26:41 +01003305 __ cmp(receiver, factory()->null_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00003306 __ j(equal, &global_object, Label::kNear);
Steve Block44f0eee2011-05-26 01:26:41 +01003307 __ cmp(receiver, factory()->undefined_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00003308 __ j(equal, &global_object, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01003309
3310 // The receiver should be a JS object.
3311 __ test(receiver, Immediate(kSmiTagMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003312 DeoptimizeIf(equal, instr, "Smi");
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003313 __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003314 DeoptimizeIf(below, instr, "not a JavaScript object");
Steve Block1e0659c2011-05-24 12:43:12 +01003315
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003316 __ jmp(&receiver_ok, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01003317 __ bind(&global_object);
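  // Use the global proxy from the function's context as the receiver.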
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003318 __ mov(receiver, FieldOperand(function, JSFunction::kContextOffset));
3319 const int global_offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
3320 __ mov(receiver, Operand(receiver, global_offset));
3321 const int proxy_offset = GlobalObject::kGlobalProxyOffset;
3322 __ mov(receiver, FieldOperand(receiver, proxy_offset));
Steve Block1e0659c2011-05-24 12:43:12 +01003323 __ bind(&receiver_ok);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003324}
3325
3326
3327void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
3328 Register receiver = ToRegister(instr->receiver());
3329 Register function = ToRegister(instr->function());
3330 Register length = ToRegister(instr->length());
3331 Register elements = ToRegister(instr->elements());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003332 DCHECK(receiver.is(eax)); // Used for parameter count.
3333 DCHECK(function.is(edi)); // Required by InvokeFunction.
3334 DCHECK(ToRegister(instr->result()).is(eax));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003335
3336 // Copy the arguments to this function possibly from the
3337 // adaptor frame below it.
3338 const uint32_t kArgumentsLimit = 1 * KB;
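  // Deoptimize rather than copy an unbounded number of arguments onto the
  // stack.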
3339 __ cmp(length, kArgumentsLimit);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003340 DeoptimizeIf(above, instr, "too many arguments");
Ben Murdochb0fe1622011-05-05 13:52:32 +01003341
3342 __ push(receiver);
3343 __ mov(receiver, length);
3344
3345 // Loop through the arguments pushing them onto the execution
3346 // stack.
Ben Murdoch257744e2011-11-30 15:57:28 +00003347 Label invoke, loop;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003348 // length is a small non-negative integer, due to the test above.
3349 __ test(length, Operand(length));
Ben Murdoch257744e2011-11-30 15:57:28 +00003350 __ j(zero, &invoke, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003351 __ bind(&loop);
3352 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
3353 __ dec(length);
3354 __ j(not_zero, &loop);
3355
3356 // Invoke the function.
3357 __ bind(&invoke);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003358 DCHECK(instr->HasPointerMap());
Steve Block1e0659c2011-05-24 12:43:12 +01003359 LPointerMap* pointers = instr->pointer_map();
Ben Murdoch2b4ba112012-01-20 14:57:15 +00003360 SafepointGenerator safepoint_generator(
3361 this, pointers, Safepoint::kLazyDeopt);
Ben Murdoch257744e2011-11-30 15:57:28 +00003362 ParameterCount actual(eax);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003363 __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
3364}
3365
3366
3367void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
3368 __ int3();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003369}
3370
3371
3372void LCodeGen::DoPushArgument(LPushArgument* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003373 LOperand* argument = instr->value();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003374 EmitPushTaggedOperand(argument);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003375}
3376
3377
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003378void LCodeGen::DoDrop(LDrop* instr) {
3379 __ Drop(instr->count());
3380}
3381
3382
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003383void LCodeGen::DoThisFunction(LThisFunction* instr) {
3384 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003385 __ mov(result, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003386}
3387
3388
Steve Block1e0659c2011-05-24 12:43:12 +01003389void LCodeGen::DoContext(LContext* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003390 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003391 if (info()->IsOptimizing()) {
3392 __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
3393 } else {
3394 // If there is no frame, the context must be in esi.
3395 DCHECK(result.is(esi));
3396 }
Steve Block1e0659c2011-05-24 12:43:12 +01003397}
3398
3399
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003400void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003401 DCHECK(ToRegister(instr->context()).is(esi));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003402 __ push(esi); // The context is the first argument.
3403 __ push(Immediate(instr->hydrogen()->pairs()));
3404 __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags())));
3405 CallRuntime(Runtime::kDeclareGlobals, 3, instr);
3406}
3407
3408
Ben Murdochb0fe1622011-05-05 13:52:32 +01003409void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003410 int formal_parameter_count,
Ben Murdochb0fe1622011-05-05 13:52:32 +01003411 int arity,
Ben Murdoch257744e2011-11-30 15:57:28 +00003412 LInstruction* instr,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003413 EDIState edi_state) {
3414 bool dont_adapt_arguments =
3415 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3416 bool can_invoke_directly =
3417 dont_adapt_arguments || formal_parameter_count == arity;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003418
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003419 if (can_invoke_directly) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003420 if (edi_state == EDI_UNINITIALIZED) {
3421 __ LoadHeapObject(edi, function);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003422 }
3423
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003424 // Change context.
3425 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
3426
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003427 // Set eax to arguments count if adaptation is not needed. Assumes that eax
3428 // is available to write to at this point.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003429 if (dont_adapt_arguments) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003430 __ mov(eax, arity);
3431 }
3432
3433 // Invoke function directly.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003434 if (function.is_identical_to(info()->closure())) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003435 __ CallSelf();
3436 } else {
3437 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
3438 }
3439 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3440 } else {
3441 // We need to adapt arguments.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003442 LPointerMap* pointers = instr->pointer_map();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003443 SafepointGenerator generator(
3444 this, pointers, Safepoint::kLazyDeopt);
3445 ParameterCount count(arity);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003446 ParameterCount expected(formal_parameter_count);
3447 __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003448 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003449}
3450
3451
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003452void LCodeGen::DoTailCallThroughMegamorphicCache(
3453 LTailCallThroughMegamorphicCache* instr) {
3454 Register receiver = ToRegister(instr->receiver());
3455 Register name = ToRegister(instr->name());
3456 DCHECK(receiver.is(LoadDescriptor::ReceiverRegister()));
3457 DCHECK(name.is(LoadDescriptor::NameRegister()));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003458 Register slot = FLAG_vector_ics ? ToRegister(instr->slot()) : no_reg;
3459 Register vector = FLAG_vector_ics ? ToRegister(instr->vector()) : no_reg;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003460
3461 Register scratch = ebx;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003462 Register extra = edi;
3463 DCHECK(!extra.is(slot) && !extra.is(vector));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003464 DCHECK(!scratch.is(receiver) && !scratch.is(name));
3465 DCHECK(!extra.is(receiver) && !extra.is(name));
3466
3467 // Important for the tail-call.
3468 bool must_teardown_frame = NeedsEagerFrame();
3469
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003470 if (!instr->hydrogen()->is_just_miss()) {
3471 if (FLAG_vector_ics) {
3472 __ push(slot);
3473 __ push(vector);
3474 }
3475
3476 // The probe will tail call to a handler if found.
3477 // If --vector-ics is on, then it knows to pop the two args first.
3478 DCHECK(!instr->hydrogen()->is_keyed_load());
3479 isolate()->stub_cache()->GenerateProbe(
3480 masm(), Code::LOAD_IC, instr->hydrogen()->flags(), must_teardown_frame,
3481 receiver, name, scratch, extra);
3482
3483 if (FLAG_vector_ics) {
3484 __ pop(vector);
3485 __ pop(slot);
3486 }
3487 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003488
3489 // Tail call to miss if we ended up here.
3490 if (must_teardown_frame) __ leave();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003491 if (instr->hydrogen()->is_keyed_load()) {
3492 KeyedLoadIC::GenerateMiss(masm());
3493 } else {
3494 LoadIC::GenerateMiss(masm());
3495 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003496}
3497
3498
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003499void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
3500 DCHECK(ToRegister(instr->result()).is(eax));
3501
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003502 if (instr->hydrogen()->IsTailCall()) {
3503 if (NeedsEagerFrame()) __ leave();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003504
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003505 if (instr->target()->IsConstantOperand()) {
3506 LConstantOperand* target = LConstantOperand::cast(instr->target());
3507 Handle<Code> code = Handle<Code>::cast(ToHandle(target));
3508 __ jmp(code, RelocInfo::CODE_TARGET);
3509 } else {
3510 DCHECK(instr->target()->IsRegister());
3511 Register target = ToRegister(instr->target());
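      // Skip the Code object header (and drop the heap object tag) to get the
      // address of the first instruction.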
3512 __ add(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
3513 __ jmp(target);
3514 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003515 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003516 LPointerMap* pointers = instr->pointer_map();
3517 SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
3518
3519 if (instr->target()->IsConstantOperand()) {
3520 LConstantOperand* target = LConstantOperand::cast(instr->target());
3521 Handle<Code> code = Handle<Code>::cast(ToHandle(target));
3522 generator.BeforeCall(__ CallSize(code, RelocInfo::CODE_TARGET));
3523 __ call(code, RelocInfo::CODE_TARGET);
3524 } else {
3525 DCHECK(instr->target()->IsRegister());
3526 Register target = ToRegister(instr->target());
3527 generator.BeforeCall(__ CallSize(Operand(target)));
3528 __ add(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
3529 __ call(target);
3530 }
3531 generator.AfterCall();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003532 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003533}
3534
3535
3536void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
3537 DCHECK(ToRegister(instr->function()).is(edi));
3538 DCHECK(ToRegister(instr->result()).is(eax));
3539
3540 if (instr->hydrogen()->pass_argument_count()) {
3541 __ mov(eax, instr->arity());
3542 }
3543
3544 // Change context.
3545 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
3546
3547 bool is_self_call = false;
3548 if (instr->hydrogen()->function()->IsConstant()) {
3549 HConstant* fun_const = HConstant::cast(instr->hydrogen()->function());
3550 Handle<JSFunction> jsfun =
3551 Handle<JSFunction>::cast(fun_const->handle(isolate()));
3552 is_self_call = jsfun.is_identical_to(info()->closure());
3553 }
3554
3555 if (is_self_call) {
3556 __ CallSelf();
3557 } else {
3558 __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
3559 }
3560
3561 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
3562}
3563
3564
3565void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003566 Register input_reg = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003567 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003568 factory()->heap_number_map());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003569 DeoptimizeIf(not_equal, instr, "not a heap number");
Ben Murdochb0fe1622011-05-05 13:52:32 +01003570
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003571 Label slow, allocated, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003572 Register tmp = input_reg.is(eax) ? ecx : eax;
3573 Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
3574
3575 // Preserve the value of all registers.
Ben Murdoch8b112d22011-06-08 16:22:53 +01003576 PushSafepointRegistersScope scope(this);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003577
Ben Murdochb0fe1622011-05-05 13:52:32 +01003578 __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
Steve Block1e0659c2011-05-24 12:43:12 +01003579 // Check the sign of the argument. If the argument is positive, just
3580 // return it. We do not need to patch the stack since |input| and
3581 // |result| are the same register and |input| will be restored
3582 // unchanged by popping safepoint registers.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003583 __ test(tmp, Immediate(HeapNumber::kSignMask));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003584 __ j(zero, &done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003585
Ben Murdochb0fe1622011-05-05 13:52:32 +01003586 __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003587 __ jmp(&allocated, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003588
3589 // Slow case: Call the runtime system to do the number allocation.
3590 __ bind(&slow);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003591 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0,
3592 instr, instr->context());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003593 // Set the pointer to the new heap number in tmp.
3594 if (!tmp.is(eax)) __ mov(tmp, eax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003595 // Restore input_reg after call to runtime.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003596 __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003597
3598 __ bind(&allocated);
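  // Copy the input number into the freshly allocated one with the sign bit
  // cleared, then write it back to the safepoint slot of the input register.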
3599 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3600 __ and_(tmp2, ~HeapNumber::kSignMask);
3601 __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
3602 __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
3603 __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003604 __ StoreToSafepointRegisterSlot(input_reg, tmp);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003605
Steve Block1e0659c2011-05-24 12:43:12 +01003606 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003607}
3608
3609
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003610void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003611 Register input_reg = ToRegister(instr->value());
Steve Block1e0659c2011-05-24 12:43:12 +01003612 __ test(input_reg, Operand(input_reg));
3613 Label is_positive;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003614 __ j(not_sign, &is_positive, Label::kNear);
3615 __ neg(input_reg); // Sets flags.
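  // Negating kMinInt leaves the value negative, so the sign flag doubles as
  // the overflow check here.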
3616 DeoptimizeIf(negative, instr, "overflow");
Steve Block1e0659c2011-05-24 12:43:12 +01003617 __ bind(&is_positive);
3618}
3619
3620
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003621void LCodeGen::DoMathAbs(LMathAbs* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003622 // Class for deferred case.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003623 class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003624 public:
3625 DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003626 LMathAbs* instr)
Ben Murdochb0fe1622011-05-05 13:52:32 +01003627 : LDeferredCode(codegen), instr_(instr) { }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003628 void Generate() OVERRIDE {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003629 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
3630 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04003631 LInstruction* instr() OVERRIDE { return instr_; }
3632
Ben Murdochb0fe1622011-05-05 13:52:32 +01003633 private:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003634 LMathAbs* instr_;
Ben Murdochb0fe1622011-05-05 13:52:32 +01003635 };
3636
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003637 DCHECK(instr->value()->Equals(instr->result()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003638 Representation r = instr->hydrogen()->value()->representation();
3639
3640 if (r.IsDouble()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003641 XMMRegister scratch = double_scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003642 XMMRegister input_reg = ToDoubleRegister(instr->value());
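    // 0.0 - x differs from x only in the sign bit (for the values that matter
    // here), so ANDing them clears the sign and yields the absolute value.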
Ben Murdoch257744e2011-11-30 15:57:28 +00003643 __ xorps(scratch, scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003644 __ subsd(scratch, input_reg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003645 __ andps(input_reg, scratch);
3646 } else if (r.IsSmiOrInteger32()) {
Steve Block1e0659c2011-05-24 12:43:12 +01003647 EmitIntegerMathAbs(instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003648 } else { // Tagged case.
3649 DeferredMathAbsTaggedHeapNumber* deferred =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003650 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003651 Register input_reg = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003652 // Smi check.
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003653 __ JumpIfNotSmi(input_reg, deferred->entry());
Steve Block1e0659c2011-05-24 12:43:12 +01003654 EmitIntegerMathAbs(instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003655 __ bind(deferred->exit());
3656 }
3657}
3658
3659
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003660void LCodeGen::DoMathFloor(LMathFloor* instr) {
3661 XMMRegister xmm_scratch = double_scratch0();
Ben Murdochb0fe1622011-05-05 13:52:32 +01003662 Register output_reg = ToRegister(instr->result());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003663 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003664
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003665 if (CpuFeatures::IsSupported(SSE4_1)) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003666 CpuFeatureScope scope(masm(), SSE4_1);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003667 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3668 // Deoptimize on negative zero.
3669 Label non_zero;
3670 __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
3671 __ ucomisd(input_reg, xmm_scratch);
3672 __ j(not_equal, &non_zero, Label::kNear);
3673 __ movmskpd(output_reg, input_reg);
3674 __ test(output_reg, Immediate(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003675 DeoptimizeIf(not_zero, instr, "minus zero");
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003676 __ bind(&non_zero);
3677 }
3678 __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
3679 __ cvttsd2si(output_reg, Operand(xmm_scratch));
3680 // Overflow is signalled with minint.
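    // Subtracting 1 from kMinInt is the only case that overflows, so the
    // overflow flag catches the out-of-range conversion.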
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003681 __ cmp(output_reg, 0x1);
3682 DeoptimizeIf(overflow, instr, "overflow");
Ben Murdochb0fe1622011-05-05 13:52:32 +01003683 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003684 Label negative_sign, done;
3685 // Deoptimize on unordered.
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003686 __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
3687 __ ucomisd(input_reg, xmm_scratch);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003688 DeoptimizeIf(parity_even, instr, "NaN");
3689 __ j(below, &negative_sign, Label::kNear);
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003690
3691 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3692 // Check for negative zero.
3693 Label positive_sign;
3694 __ j(above, &positive_sign, Label::kNear);
3695 __ movmskpd(output_reg, input_reg);
3696 __ test(output_reg, Immediate(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003697 DeoptimizeIf(not_zero, instr, "minus zero");
3698 __ Move(output_reg, Immediate(0));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003699 __ jmp(&done, Label::kNear);
3700 __ bind(&positive_sign);
3701 }
3702
3703 // Use truncating instruction (OK because input is positive).
3704 __ cvttsd2si(output_reg, Operand(input_reg));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003705 // Overflow is signalled with minint.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003706 __ cmp(output_reg, 0x1);
3707 DeoptimizeIf(overflow, instr, "overflow");
3708 __ jmp(&done, Label::kNear);
3709
3710 // Non-zero negative reaches here.
3711 __ bind(&negative_sign);
3712 // Truncate, then compare and compensate.
3713 __ cvttsd2si(output_reg, Operand(input_reg));
3714 __ Cvtsi2sd(xmm_scratch, output_reg);
3715 __ ucomisd(input_reg, xmm_scratch);
3716 __ j(equal, &done, Label::kNear);
3717 __ sub(output_reg, Immediate(1));
3718 DeoptimizeIf(overflow, instr, "overflow");
3719
Ben Murdoch69a99ed2011-11-30 16:03:39 +00003720 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003721 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003722}
3723
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003724
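// Math.round(x) == floor(x + 0.5). The code below splits the input range:
//   x >= 0.5        : truncate x + 0.5 (truncation == floor since x + 0.5 >= 1),
//   -0.5 <= x < 0.5 : the result is 0 (with a minus-zero bailout if required),
//   x < -0.5        : truncate x + 0.5 (== ceil since x + 0.5 < 0) and subtract
//                     one when the truncation was inexact.
// For example x = -2.5: trunc(-2.0) == -2 exactly, matching Math.round(-2.5);
// x = -2.6: trunc(-2.1) == -2 is inexact, so the result is compensated to -3.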
3725void LCodeGen::DoMathRound(LMathRound* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003726 Register output_reg = ToRegister(instr->result());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003727 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003728 XMMRegister xmm_scratch = double_scratch0();
3729 XMMRegister input_temp = ToDoubleRegister(instr->temp());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003730 ExternalReference one_half = ExternalReference::address_of_one_half();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003731 ExternalReference minus_one_half =
3732 ExternalReference::address_of_minus_one_half();
3733
3734 Label done, round_to_zero, below_one_half, do_not_compensate;
3735 Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
3736
3737 __ movsd(xmm_scratch, Operand::StaticVariable(one_half));
Ben Murdoch257744e2011-11-30 15:57:28 +00003738 __ ucomisd(xmm_scratch, input_reg);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003739 __ j(above, &below_one_half, Label::kNear);
3740
3741 // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x).
Ben Murdoch692be652012-01-10 18:47:50 +00003742 __ addsd(xmm_scratch, input_reg);
Ben Murdoch692be652012-01-10 18:47:50 +00003743 __ cvttsd2si(output_reg, Operand(xmm_scratch));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003744 // Overflow is signalled with minint.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003745 __ cmp(output_reg, 0x1);
3746 DeoptimizeIf(overflow, instr, "overflow");
3747 __ jmp(&done, dist);
Ben Murdoch257744e2011-11-30 15:57:28 +00003748
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003749 __ bind(&below_one_half);
3750 __ movsd(xmm_scratch, Operand::StaticVariable(minus_one_half));
3751 __ ucomisd(xmm_scratch, input_reg);
3752 __ j(below_equal, &round_to_zero, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00003753
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003754 // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then
3755 // compare and compensate.
3756 __ movaps(input_temp, input_reg); // Do not alter input_reg.
3757 __ subsd(input_temp, xmm_scratch);
3758 __ cvttsd2si(output_reg, Operand(input_temp));
3759 // Catch minint due to overflow, and to prevent overflow when compensating.
3760 __ cmp(output_reg, 0x1);
3761 DeoptimizeIf(overflow, instr, "overflow");
3762
3763 __ Cvtsi2sd(xmm_scratch, output_reg);
3764 __ ucomisd(xmm_scratch, input_temp);
3765 __ j(equal, &done, dist);
3766 __ sub(output_reg, Immediate(1));
3767 // No overflow because we already ruled out minint.
3768 __ jmp(&done, dist);
3769
3770 __ bind(&round_to_zero);
Ben Murdoch257744e2011-11-30 15:57:28 +00003771 // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
3772 // we can ignore the difference between a result of -0 and +0.
3773 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3774 // If the sign is positive, we return +0.
3775 __ movmskpd(output_reg, input_reg);
3776 __ test(output_reg, Immediate(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003777 DeoptimizeIf(not_zero, instr, "minus zero");
Ben Murdoch257744e2011-11-30 15:57:28 +00003778 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003779 __ Move(output_reg, Immediate(0));
Ben Murdoch257744e2011-11-30 15:57:28 +00003780 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003781}
3782
3783
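// Math.fround(x) is the nearest float32 value of x, returned as a double.
// The cvtsd2ss/cvtss2sd pair performs exactly that double -> float -> double
// round trip (under the default round-to-nearest SSE rounding mode).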
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003784void LCodeGen::DoMathFround(LMathFround* instr) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003785 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003786 XMMRegister output_reg = ToDoubleRegister(instr->result());
3787 __ cvtsd2ss(output_reg, input_reg);
3788 __ cvtss2sd(output_reg, output_reg);
3789}
3790
3791
3792void LCodeGen::DoMathSqrt(LMathSqrt* instr) {
3793 Operand input = ToOperand(instr->value());
3794 XMMRegister output = ToDoubleRegister(instr->result());
3795 __ sqrtsd(output, input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003796}
3797
3798
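// Math.pow(x, 0.5) is not a plain square root: per ECMA-262 15.8.2.13,
// pow(-Infinity, 0.5) is +Infinity while sqrt(-Infinity) is NaN, hence the
// explicit -Infinity check below. Adding +0 before sqrtsd also turns a -0
// base into +0, so pow(-0, 0.5) correctly yields +0.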
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003799void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003800 XMMRegister xmm_scratch = double_scratch0();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003801 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003802 Register scratch = ToRegister(instr->temp());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003803 DCHECK(ToDoubleRegister(instr->result()).is(input_reg));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003804
3805 // Note that according to ECMA-262 15.8.2.13:
3806 // Math.pow(-Infinity, 0.5) == Infinity
3807 // Math.sqrt(-Infinity) == NaN
3808 Label done, sqrt;
3809 // Check base for -Infinity. According to IEEE-754, single-precision
3810 // -Infinity has the highest 9 bits set and the lowest 23 bits cleared.
3811 __ mov(scratch, 0xFF800000);
3812 __ movd(xmm_scratch, scratch);
3813 __ cvtss2sd(xmm_scratch, xmm_scratch);
3814 __ ucomisd(input_reg, xmm_scratch);
3815 // Comparing -Infinity with NaN results in "unordered", which sets the
3816 // zero flag as if both were equal. However, it also sets the carry flag.
3817 __ j(not_equal, &sqrt, Label::kNear);
3818 __ j(carry, &sqrt, Label::kNear);
3819 // If input is -Infinity, return Infinity.
3820 __ xorps(input_reg, input_reg);
3821 __ subsd(input_reg, xmm_scratch);
3822 __ jmp(&done, Label::kNear);
3823
3824 // Square root.
3825 __ bind(&sqrt);
Ben Murdoch257744e2011-11-30 15:57:28 +00003826 __ xorps(xmm_scratch, xmm_scratch);
Steve Block1e0659c2011-05-24 12:43:12 +01003827 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
Ben Murdochb0fe1622011-05-05 13:52:32 +01003828 __ sqrtsd(input_reg, input_reg);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003829 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003830}
3831
3832
3833void LCodeGen::DoPower(LPower* instr) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003834 Representation exponent_type = instr->hydrogen()->right()->representation();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003835 // Having marked this as a call, we can use any registers.
3836 // Just make sure that the input/output registers are the expected ones.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003837 Register tagged_exponent = MathPowTaggedDescriptor::exponent();
3838 DCHECK(!instr->right()->IsDoubleRegister() ||
3839 ToDoubleRegister(instr->right()).is(xmm1));
3840 DCHECK(!instr->right()->IsRegister() ||
3841 ToRegister(instr->right()).is(tagged_exponent));
3842 DCHECK(ToDoubleRegister(instr->left()).is(xmm2));
3843 DCHECK(ToDoubleRegister(instr->result()).is(xmm3));
Steve Block44f0eee2011-05-26 01:26:41 +01003844
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003845 if (exponent_type.IsSmi()) {
3846 MathPowStub stub(isolate(), MathPowStub::TAGGED);
3847 __ CallStub(&stub);
3848 } else if (exponent_type.IsTagged()) {
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003849 Label no_deopt;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003850 __ JumpIfSmi(tagged_exponent, &no_deopt);
3851 DCHECK(!ecx.is(tagged_exponent));
3852 __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, ecx);
3853 DeoptimizeIf(not_equal, instr, "not a heap number");
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003854 __ bind(&no_deopt);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003855 MathPowStub stub(isolate(), MathPowStub::TAGGED);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003856 __ CallStub(&stub);
3857 } else if (exponent_type.IsInteger32()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003858 MathPowStub stub(isolate(), MathPowStub::INTEGER);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003859 __ CallStub(&stub);
3860 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003861 DCHECK(exponent_type.IsDouble());
3862 MathPowStub stub(isolate(), MathPowStub::DOUBLE);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003863 __ CallStub(&stub);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003864 }
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003865}
Ben Murdochb0fe1622011-05-05 13:52:32 +01003866
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003867
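// Math.log via the x87 FPU: fldln2 pushes ln(2) and fyl2x computes
// ST(1) * log2(ST(0)), so the sequence evaluates ln(2) * log2(x) == ln(x).
// Out-of-domain inputs are filtered first: x < 0 (or NaN) loads the
// canonical NaN and x == 0 loads -Infinity, as the spec requires.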
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003868void LCodeGen::DoMathLog(LMathLog* instr) {
3869 DCHECK(instr->value()->Equals(instr->result()));
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003870 XMMRegister input_reg = ToDoubleRegister(instr->value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003871 XMMRegister xmm_scratch = double_scratch0();
Ben Murdoch257744e2011-11-30 15:57:28 +00003872 Label positive, done, zero;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003873 __ xorps(xmm_scratch, xmm_scratch);
3874 __ ucomisd(input_reg, xmm_scratch);
Ben Murdoch257744e2011-11-30 15:57:28 +00003875 __ j(above, &positive, Label::kNear);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003876 __ j(not_carry, &zero, Label::kNear);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003877 ExternalReference nan =
3878 ExternalReference::address_of_canonical_non_hole_nan();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003879 __ movsd(input_reg, Operand::StaticVariable(nan));
Ben Murdoch257744e2011-11-30 15:57:28 +00003880 __ jmp(&done, Label::kNear);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003881 __ bind(&zero);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003882 ExternalReference ninf =
3883 ExternalReference::address_of_negative_infinity();
3884 __ movsd(input_reg, Operand::StaticVariable(ninf));
Ben Murdoch257744e2011-11-30 15:57:28 +00003885 __ jmp(&done, Label::kNear);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003886 __ bind(&positive);
3887 __ fldln2();
3888 __ sub(Operand(esp), Immediate(kDoubleSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003889 __ movsd(Operand(esp, 0), input_reg);
Ben Murdoch8b112d22011-06-08 16:22:53 +01003890 __ fld_d(Operand(esp, 0));
3891 __ fyl2x();
3892 __ fstp_d(Operand(esp, 0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003893 __ movsd(input_reg, Operand(esp, 0));
Ben Murdoch8b112d22011-06-08 16:22:53 +01003894 __ add(Operand(esp), Immediate(kDoubleSize));
3895 __ bind(&done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003896}
3897
3898
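// Math.clz32: bsr returns the index of the highest set bit (i.e. 31 - clz)
// and leaves its destination undefined when the input is zero (ZF is set).
// The zero case loads 63 instead, so the final xor with 31 maps index i to
// 31 - i and maps 63 to 32. Example: input 0x00010000 -> bsr gives 16 ->
// 16 ^ 31 == 15 == clz32(0x10000).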
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003899void LCodeGen::DoMathClz32(LMathClz32* instr) {
3900 Register input = ToRegister(instr->value());
3901 Register result = ToRegister(instr->result());
3902 Label not_zero_input;
3903 __ bsr(result, input);
3904
3905 __ j(not_zero, &not_zero_input);
3906 __ Move(result, Immediate(63)); // 63^31 == 32
3907
3908 __ bind(&not_zero_input);
3909 __ xor_(result, Immediate(31)); // for x in [0..31], 31^x == 31-x.
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003910}
3911
3912
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003913void LCodeGen::DoMathExp(LMathExp* instr) {
3914 XMMRegister input = ToDoubleRegister(instr->value());
3915 XMMRegister result = ToDoubleRegister(instr->result());
3916 XMMRegister temp0 = double_scratch0();
3917 Register temp1 = ToRegister(instr->temp1());
3918 Register temp2 = ToRegister(instr->temp2());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003919
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003920 MathExpGenerator::EmitMathExp(masm(), input, result, temp0, temp1, temp2);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003921}
3922
3923
Ben Murdoch257744e2011-11-30 15:57:28 +00003924void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003925 DCHECK(ToRegister(instr->context()).is(esi));
3926 DCHECK(ToRegister(instr->function()).is(edi));
3927 DCHECK(instr->HasPointerMap());
Ben Murdoch257744e2011-11-30 15:57:28 +00003928
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003929 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
3930 if (known_function.is_null()) {
3931 LPointerMap* pointers = instr->pointer_map();
3932 SafepointGenerator generator(
3933 this, pointers, Safepoint::kLazyDeopt);
3934 ParameterCount count(instr->arity());
3935 __ InvokeFunction(edi, count, CALL_FUNCTION, generator);
3936 } else {
3937 CallKnownFunction(known_function,
3938 instr->hydrogen()->formal_parameter_count(),
3939 instr->arity(),
3940 instr,
3941 EDI_CONTAINS_TARGET);
3942 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003943}
3944
3945
3946void LCodeGen::DoCallFunction(LCallFunction* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003947 DCHECK(ToRegister(instr->context()).is(esi));
3948 DCHECK(ToRegister(instr->function()).is(edi));
3949 DCHECK(ToRegister(instr->result()).is(eax));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003950
3951 int arity = instr->arity();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003952 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00003953 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003954}
3955
3956
Ben Murdochb0fe1622011-05-05 13:52:32 +01003957void LCodeGen::DoCallNew(LCallNew* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003958 DCHECK(ToRegister(instr->context()).is(esi));
3959 DCHECK(ToRegister(instr->constructor()).is(edi));
3960 DCHECK(ToRegister(instr->result()).is(eax));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003961
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003962 // No cell in ebx for construct type feedback in optimized code
3963 __ mov(ebx, isolate()->factory()->undefined_value());
3964 CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
3965 __ Move(eax, Immediate(instr->arity()));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01003966 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003967}
3968
3969
Ben Murdochb8a8cc12014-11-26 15:28:44 +00003970void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
3971 DCHECK(ToRegister(instr->context()).is(esi));
3972 DCHECK(ToRegister(instr->constructor()).is(edi));
3973 DCHECK(ToRegister(instr->result()).is(eax));
3974
3975 __ Move(eax, Immediate(instr->arity()));
3976 __ mov(ebx, isolate()->factory()->undefined_value());
3977 ElementsKind kind = instr->hydrogen()->elements_kind();
3978 AllocationSiteOverrideMode override_mode =
3979 (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
3980 ? DISABLE_ALLOCATION_SITES
3981 : DONT_OVERRIDE;
3982
3983 if (instr->arity() == 0) {
3984 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
3985 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
3986 } else if (instr->arity() == 1) {
3987 Label done;
3988 if (IsFastPackedElementsKind(kind)) {
3989 Label packed_case;
3990      // We might need the holey variant of the constructor stub: a
3991      // nonzero first argument (the length) creates an array with holes.
3992 __ mov(ecx, Operand(esp, 0));
3993 __ test(ecx, ecx);
3994 __ j(zero, &packed_case, Label::kNear);
3995
3996 ElementsKind holey_kind = GetHoleyElementsKind(kind);
3997 ArraySingleArgumentConstructorStub stub(isolate(),
3998 holey_kind,
3999 override_mode);
4000 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
4001 __ jmp(&done, Label::kNear);
4002 __ bind(&packed_case);
4003 }
4004
4005 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
4006 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
4007 __ bind(&done);
4008 } else {
4009 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
4010 CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
4011 }
4012}
4013
4014
Ben Murdochb0fe1622011-05-05 13:52:32 +01004015void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004016 DCHECK(ToRegister(instr->context()).is(esi));
4017 CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
4018}
4019
4020
4021void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
4022 Register function = ToRegister(instr->function());
4023 Register code_object = ToRegister(instr->code_object());
4024 __ lea(code_object, FieldOperand(code_object, Code::kHeaderSize));
4025 __ mov(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object);
4026}
4027
4028
4029void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
4030 Register result = ToRegister(instr->result());
4031 Register base = ToRegister(instr->base_object());
4032 if (instr->offset()->IsConstantOperand()) {
4033 LConstantOperand* offset = LConstantOperand::cast(instr->offset());
4034 __ lea(result, Operand(base, ToInteger32(offset)));
4035 } else {
4036 Register offset = ToRegister(instr->offset());
4037 __ lea(result, Operand(base, offset, times_1, 0));
4038 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004039}
4040
4041
4042void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004043 Representation representation = instr->hydrogen()->field_representation();
Ben Murdochb0fe1622011-05-05 13:52:32 +01004044
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004045 HObjectAccess access = instr->hydrogen()->access();
4046 int offset = access.offset();
4047
4048 if (access.IsExternalMemory()) {
4049 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4050 MemOperand operand = instr->object()->IsConstantOperand()
4051 ? MemOperand::StaticVariable(
4052 ToExternalReference(LConstantOperand::cast(instr->object())))
4053 : MemOperand(ToRegister(instr->object()), offset);
4054 if (instr->value()->IsConstantOperand()) {
4055 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4056 __ mov(operand, Immediate(ToInteger32(operand_value)));
4057 } else {
4058 Register value = ToRegister(instr->value());
4059 __ Store(value, operand, representation);
4060 }
4061 return;
4062 }
4063
4064 Register object = ToRegister(instr->object());
4065 __ AssertNotSmi(object);
4066
4067 DCHECK(!representation.IsSmi() ||
4068 !instr->value()->IsConstantOperand() ||
4069 IsSmi(LConstantOperand::cast(instr->value())));
4070 if (representation.IsDouble()) {
4071 DCHECK(access.IsInobject());
4072 DCHECK(!instr->hydrogen()->has_transition());
4073 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4074 XMMRegister value = ToDoubleRegister(instr->value());
4075 __ movsd(FieldOperand(object, offset), value);
4076 return;
4077 }
4078
4079 if (instr->hydrogen()->has_transition()) {
4080 Handle<Map> transition = instr->hydrogen()->transition_map();
4081 AddDeprecationDependency(transition);
4082 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition);
4083 if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
4084 Register temp = ToRegister(instr->temp());
4085 Register temp_map = ToRegister(instr->temp_map());
4086 // Update the write barrier for the map field.
4087 __ RecordWriteForMap(object, transition, temp_map, temp, kSaveFPRegs);
4088 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004089 }
4090
4091 // Do the store.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004092 Register write_register = object;
4093 if (!access.IsInobject()) {
4094 write_register = ToRegister(instr->temp());
4095 __ mov(write_register, FieldOperand(object, JSObject::kPropertiesOffset));
4096 }
4097
4098 MemOperand operand = FieldOperand(write_register, offset);
4099 if (instr->value()->IsConstantOperand()) {
4100 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4101 if (operand_value->IsRegister()) {
4102 Register value = ToRegister(operand_value);
4103 __ Store(value, operand, representation);
4104 } else if (representation.IsInteger32()) {
4105 Immediate immediate = ToImmediate(operand_value, representation);
4106 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4107 __ mov(operand, immediate);
4108 } else {
4109 Handle<Object> handle_value = ToHandle(operand_value);
4110 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4111 __ mov(operand, handle_value);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004112 }
4113 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004114 Register value = ToRegister(instr->value());
4115 __ Store(value, operand, representation);
4116 }
4117
4118 if (instr->hydrogen()->NeedsWriteBarrier()) {
4119 Register value = ToRegister(instr->value());
4120 Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object;
4121 // Update the write barrier for the object for in-object properties.
4122 __ RecordWriteField(write_register,
4123 offset,
4124 value,
4125 temp,
4126 kSaveFPRegs,
4127 EMIT_REMEMBERED_SET,
4128 instr->hydrogen()->SmiCheckForWriteBarrier(),
4129 instr->hydrogen()->PointersToHereCheckForValue());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004130 }
4131}
4132
4133
4134void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004135 DCHECK(ToRegister(instr->context()).is(esi));
4136 DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
4137 DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004138
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004139 __ mov(StoreDescriptor::NameRegister(), instr->name());
4140 Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004141 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004142}
4143
4144
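// Deoptimize when index >= length (or index > length if equality is allowed).
// Whichever operand is a constant becomes the immediate of the cmp, so when
// the length ends up as the left operand the condition is commuted to keep
// the same logical test.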
4145void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004146 Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal;
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004147 if (instr->index()->IsConstantOperand()) {
4148 __ cmp(ToOperand(instr->length()),
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004149 ToImmediate(LConstantOperand::cast(instr->index()),
4150 instr->hydrogen()->length()->representation()));
4151 cc = CommuteCondition(cc);
4152 } else if (instr->length()->IsConstantOperand()) {
4153 __ cmp(ToOperand(instr->index()),
4154 ToImmediate(LConstantOperand::cast(instr->length()),
4155 instr->hydrogen()->index()->representation()));
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004156 } else {
4157 __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004158 }
4159 if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
4160 Label done;
4161 __ j(NegateCondition(cc), &done, Label::kNear);
4162 __ int3();
4163 __ bind(&done);
4164 } else {
4165 DeoptimizeIf(cc, instr, "out of bounds");
Ben Murdoch69a99ed2011-11-30 16:03:39 +00004166 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004167}
4168
4169
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004170void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
Ben Murdoch589d6972011-11-30 16:04:58 +00004171 ElementsKind elements_kind = instr->elements_kind();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004172 LOperand* key = instr->key();
4173 if (!key->IsConstantOperand() &&
4174 ExternalArrayOpRequiresTemp(instr->hydrogen()->key()->representation(),
4175 elements_kind)) {
4176 __ SmiUntag(ToRegister(key));
4177 }
4178 Operand operand(BuildFastArrayOperand(
4179 instr->elements(),
4180 key,
4181 instr->hydrogen()->key()->representation(),
4182 elements_kind,
4183 instr->base_offset()));
4184 if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
4185 elements_kind == FLOAT32_ELEMENTS) {
4186 XMMRegister xmm_scratch = double_scratch0();
4187 __ cvtsd2ss(xmm_scratch, ToDoubleRegister(instr->value()));
4188 __ movss(operand, xmm_scratch);
4189 } else if (elements_kind == EXTERNAL_FLOAT64_ELEMENTS ||
4190 elements_kind == FLOAT64_ELEMENTS) {
4191 __ movsd(operand, ToDoubleRegister(instr->value()));
Steve Block44f0eee2011-05-26 01:26:41 +01004192 } else {
4193 Register value = ToRegister(instr->value());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004194 switch (elements_kind) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004195 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
4196 case EXTERNAL_UINT8_ELEMENTS:
4197 case EXTERNAL_INT8_ELEMENTS:
4198 case UINT8_ELEMENTS:
4199 case INT8_ELEMENTS:
4200 case UINT8_CLAMPED_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00004201 __ mov_b(operand, value);
Steve Block44f0eee2011-05-26 01:26:41 +01004202 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004203 case EXTERNAL_INT16_ELEMENTS:
4204 case EXTERNAL_UINT16_ELEMENTS:
4205 case UINT16_ELEMENTS:
4206 case INT16_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00004207 __ mov_w(operand, value);
Steve Block44f0eee2011-05-26 01:26:41 +01004208 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004209 case EXTERNAL_INT32_ELEMENTS:
4210 case EXTERNAL_UINT32_ELEMENTS:
4211 case UINT32_ELEMENTS:
4212 case INT32_ELEMENTS:
Ben Murdoch257744e2011-11-30 15:57:28 +00004213 __ mov(operand, value);
Steve Block44f0eee2011-05-26 01:26:41 +01004214 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004215 case EXTERNAL_FLOAT32_ELEMENTS:
4216 case EXTERNAL_FLOAT64_ELEMENTS:
4217 case FLOAT32_ELEMENTS:
4218 case FLOAT64_ELEMENTS:
4219 case FAST_SMI_ELEMENTS:
Ben Murdoch589d6972011-11-30 16:04:58 +00004220 case FAST_ELEMENTS:
4221 case FAST_DOUBLE_ELEMENTS:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004222 case FAST_HOLEY_SMI_ELEMENTS:
4223 case FAST_HOLEY_ELEMENTS:
4224 case FAST_HOLEY_DOUBLE_ELEMENTS:
Ben Murdoch589d6972011-11-30 16:04:58 +00004225 case DICTIONARY_ELEMENTS:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004226 case SLOPPY_ARGUMENTS_ELEMENTS:
Steve Block44f0eee2011-05-26 01:26:41 +01004227 UNREACHABLE();
4228 break;
4229 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004230 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004231}
4232
4233
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004234void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
4235 ExternalReference canonical_nan_reference =
4236 ExternalReference::address_of_canonical_non_hole_nan();
4237 Operand double_store_operand = BuildFastArrayOperand(
4238 instr->elements(),
4239 instr->key(),
4240 instr->hydrogen()->key()->representation(),
4241 FAST_DOUBLE_ELEMENTS,
4242 instr->base_offset());
4243
4244 XMMRegister value = ToDoubleRegister(instr->value());
4245
4246 if (instr->NeedsCanonicalization()) {
4247 Label have_value;
4248
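    // ucomisd of the value against itself is unordered (PF set) only for NaN,
    // so parity_odd means "not NaN" and the value is stored unchanged; any NaN
    // is replaced by the canonical non-hole NaN so that an arbitrary NaN
    // payload cannot alias the hole representation in the backing store.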
4249 __ ucomisd(value, value);
4250 __ j(parity_odd, &have_value, Label::kNear); // NaN.
4251
4252 __ movsd(value, Operand::StaticVariable(canonical_nan_reference));
4253 __ bind(&have_value);
4254 }
4255
4256 __ movsd(double_store_operand, value);
4257}
4258
4259
4260void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
4261 Register elements = ToRegister(instr->elements());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004262 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
4263
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004264 Operand operand = BuildFastArrayOperand(
4265 instr->elements(),
4266 instr->key(),
4267 instr->hydrogen()->key()->representation(),
4268 FAST_ELEMENTS,
4269 instr->base_offset());
4270 if (instr->value()->IsRegister()) {
4271 __ mov(operand, ToRegister(instr->value()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004272 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004273 LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
4274 if (IsSmi(operand_value)) {
4275 Immediate immediate = ToImmediate(operand_value, Representation::Smi());
4276 __ mov(operand, immediate);
4277 } else {
4278 DCHECK(!IsInteger32(operand_value));
4279 Handle<Object> handle_value = ToHandle(operand_value);
4280 __ mov(operand, handle_value);
4281 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004282 }
4283
Ben Murdochb0fe1622011-05-05 13:52:32 +01004284 if (instr->hydrogen()->NeedsWriteBarrier()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004285 DCHECK(instr->value()->IsRegister());
4286 Register value = ToRegister(instr->value());
4287 DCHECK(!instr->key()->IsConstantOperand());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004288 SmiCheck check_needed =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004289 instr->hydrogen()->value()->type().IsHeapObject()
4290 ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
Ben Murdochb0fe1622011-05-05 13:52:32 +01004291 // Compute address of modified element and store it into key register.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004292 __ lea(key, operand);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004293 __ RecordWrite(elements,
4294 key,
4295 value,
4296 kSaveFPRegs,
4297 EMIT_REMEMBERED_SET,
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004298 check_needed,
4299 instr->hydrogen()->PointersToHereCheckForValue());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004300 }
4301}
4302
4303
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004304void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
4305  // Dispatch by elements kind: external/typed array, fast double array,
      // or fast array.
4306 if (instr->is_typed_elements()) {
4307 DoStoreKeyedExternalArray(instr);
4308 } else if (instr->hydrogen()->value()->representation().IsDouble()) {
4309 DoStoreKeyedFixedDoubleArray(instr);
4310 } else {
4311 DoStoreKeyedFixedArray(instr);
4312 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004313}
4314
4315
Ben Murdochb0fe1622011-05-05 13:52:32 +01004316void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004317 DCHECK(ToRegister(instr->context()).is(esi));
4318 DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
4319 DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
4320 DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004321
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004322 Handle<Code> ic =
4323 CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code();
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004324 CallCode(ic, RelocInfo::CODE_TARGET, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004325}
4326
4327
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004328void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4329 Register object = ToRegister(instr->object());
4330 Register temp = ToRegister(instr->temp());
4331 Label no_memento_found;
4332 __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
4333 DeoptimizeIf(equal, instr, "memento found");
4334 __ bind(&no_memento_found);
4335}
4336
4337
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004338void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4339 Register object_reg = ToRegister(instr->object());
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004340
4341 Handle<Map> from_map = instr->original_map();
4342 Handle<Map> to_map = instr->transitioned_map();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004343 ElementsKind from_kind = instr->from_kind();
4344 ElementsKind to_kind = instr->to_kind();
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004345
4346 Label not_applicable;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004347 bool is_simple_map_transition =
4348 IsSimpleMapChangeTransition(from_kind, to_kind);
4349 Label::Distance branch_distance =
4350 is_simple_map_transition ? Label::kNear : Label::kFar;
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004351 __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004352 __ j(not_equal, &not_applicable, branch_distance);
4353 if (is_simple_map_transition) {
4354 Register new_map_reg = ToRegister(instr->new_map_temp());
4355 __ mov(FieldOperand(object_reg, HeapObject::kMapOffset),
4356 Immediate(to_map));
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004357 // Write barrier.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004358 DCHECK_NE(instr->temp(), NULL);
4359 __ RecordWriteForMap(object_reg, to_map, new_map_reg,
4360 ToRegister(instr->temp()),
4361 kDontSaveFPRegs);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004362 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004363 DCHECK(ToRegister(instr->context()).is(esi));
4364 DCHECK(object_reg.is(eax));
4365 PushSafepointRegistersScope scope(this);
4366 __ mov(ebx, to_map);
4367 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
4368 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
4369 __ CallStub(&stub);
4370 RecordSafepointWithLazyDeopt(instr,
4371 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004372 }
4373 __ bind(&not_applicable);
4374}
4375
4376
Steve Block1e0659c2011-05-24 12:43:12 +01004377void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004378 class DeferredStringCharCodeAt FINAL : public LDeferredCode {
Steve Block1e0659c2011-05-24 12:43:12 +01004379 public:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004380 DeferredStringCharCodeAt(LCodeGen* codegen,
4381 LStringCharCodeAt* instr)
Steve Block1e0659c2011-05-24 12:43:12 +01004382 : LDeferredCode(codegen), instr_(instr) { }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004383 void Generate() OVERRIDE { codegen()->DoDeferredStringCharCodeAt(instr_); }
4384 LInstruction* instr() OVERRIDE { return instr_; }
4385
Steve Block1e0659c2011-05-24 12:43:12 +01004386 private:
4387 LStringCharCodeAt* instr_;
4388 };
4389
Steve Block1e0659c2011-05-24 12:43:12 +01004390 DeferredStringCharCodeAt* deferred =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004391 new(zone()) DeferredStringCharCodeAt(this, instr);
Steve Block1e0659c2011-05-24 12:43:12 +01004392
Ben Murdoch3ef787d2012-04-12 10:51:47 +01004393 StringCharLoadGenerator::Generate(masm(),
4394 factory(),
4395 ToRegister(instr->string()),
4396 ToRegister(instr->index()),
4397 ToRegister(instr->result()),
4398 deferred->entry());
Steve Block1e0659c2011-05-24 12:43:12 +01004399 __ bind(deferred->exit());
4400}
4401
4402
4403void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
4404 Register string = ToRegister(instr->string());
4405 Register result = ToRegister(instr->result());
4406
4407 // TODO(3095996): Get rid of this. For now, we need to make the
4408 // result register contain a valid pointer because it is already
4409 // contained in the register pointer map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004410 __ Move(result, Immediate(0));
Steve Block1e0659c2011-05-24 12:43:12 +01004411
Ben Murdoch8b112d22011-06-08 16:22:53 +01004412 PushSafepointRegistersScope scope(this);
Steve Block1e0659c2011-05-24 12:43:12 +01004413 __ push(string);
4414 // Push the index as a smi. This is safe because of the checks in
4415 // DoStringCharCodeAt above.
4416 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
4417 if (instr->index()->IsConstantOperand()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004418 Immediate immediate = ToImmediate(LConstantOperand::cast(instr->index()),
4419 Representation::Smi());
4420 __ push(immediate);
Steve Block1e0659c2011-05-24 12:43:12 +01004421 } else {
4422 Register index = ToRegister(instr->index());
4423 __ SmiTag(index);
4424 __ push(index);
4425 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004426 CallRuntimeFromDeferred(Runtime::kStringCharCodeAtRT, 2,
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004427 instr, instr->context());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004428 __ AssertSmi(eax);
Steve Block1e0659c2011-05-24 12:43:12 +01004429 __ SmiUntag(eax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004430 __ StoreToSafepointRegisterSlot(result, eax);
Steve Block1e0659c2011-05-24 12:43:12 +01004431}
4432
4433
Steve Block44f0eee2011-05-26 01:26:41 +01004434void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004435 class DeferredStringCharFromCode FINAL : public LDeferredCode {
Steve Block44f0eee2011-05-26 01:26:41 +01004436 public:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004437 DeferredStringCharFromCode(LCodeGen* codegen,
4438 LStringCharFromCode* instr)
Steve Block44f0eee2011-05-26 01:26:41 +01004439 : LDeferredCode(codegen), instr_(instr) { }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004440 void Generate() OVERRIDE {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004441 codegen()->DoDeferredStringCharFromCode(instr_);
4442 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004443 LInstruction* instr() OVERRIDE { return instr_; }
4444
Steve Block44f0eee2011-05-26 01:26:41 +01004445 private:
4446 LStringCharFromCode* instr_;
4447 };
4448
4449 DeferredStringCharFromCode* deferred =
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004450 new(zone()) DeferredStringCharFromCode(this, instr);
Steve Block44f0eee2011-05-26 01:26:41 +01004451
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004452 DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
Steve Block44f0eee2011-05-26 01:26:41 +01004453 Register char_code = ToRegister(instr->char_code());
4454 Register result = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004455 DCHECK(!char_code.is(result));
Steve Block44f0eee2011-05-26 01:26:41 +01004456
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004457 __ cmp(char_code, String::kMaxOneByteCharCode);
Steve Block44f0eee2011-05-26 01:26:41 +01004458 __ j(above, deferred->entry());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004459 __ Move(result, Immediate(factory()->single_character_string_cache()));
Steve Block44f0eee2011-05-26 01:26:41 +01004460 __ mov(result, FieldOperand(result,
4461 char_code, times_pointer_size,
4462 FixedArray::kHeaderSize));
4463 __ cmp(result, factory()->undefined_value());
4464 __ j(equal, deferred->entry());
4465 __ bind(deferred->exit());
4466}
4467
4468
4469void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
4470 Register char_code = ToRegister(instr->char_code());
4471 Register result = ToRegister(instr->result());
4472
4473 // TODO(3095996): Get rid of this. For now, we need to make the
4474 // result register contain a valid pointer because it is already
4475 // contained in the register pointer map.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004476 __ Move(result, Immediate(0));
Steve Block44f0eee2011-05-26 01:26:41 +01004477
Ben Murdoch8b112d22011-06-08 16:22:53 +01004478 PushSafepointRegistersScope scope(this);
Steve Block44f0eee2011-05-26 01:26:41 +01004479 __ SmiTag(char_code);
4480 __ push(char_code);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004481 CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
Steve Block44f0eee2011-05-26 01:26:41 +01004482 __ StoreToSafepointRegisterSlot(result, eax);
Steve Block44f0eee2011-05-26 01:26:41 +01004483}
4484
4485
Ben Murdoch257744e2011-11-30 15:57:28 +00004486void LCodeGen::DoStringAdd(LStringAdd* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004487 DCHECK(ToRegister(instr->context()).is(esi));
4488 DCHECK(ToRegister(instr->left()).is(edx));
4489 DCHECK(ToRegister(instr->right()).is(eax));
4490 StringAddStub stub(isolate(),
4491 instr->hydrogen()->flags(),
4492 instr->hydrogen()->pretenure_flag());
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004493 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Ben Murdoch257744e2011-11-30 15:57:28 +00004494}
4495
4496
Ben Murdochb0fe1622011-05-05 13:52:32 +01004497void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004498 LOperand* input = instr->value();
Ben Murdochb0fe1622011-05-05 13:52:32 +01004499 LOperand* output = instr->result();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004500 DCHECK(input->IsRegister() || input->IsStackSlot());
4501 DCHECK(output->IsDoubleRegister());
4502 __ Cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
4503}
4504
4505
4506void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4507 LOperand* input = instr->value();
4508 LOperand* output = instr->result();
4509 __ LoadUint32(ToDoubleRegister(output), ToRegister(input));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004510}
4511
4512
4513void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004514 class DeferredNumberTagI FINAL : public LDeferredCode {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004515 public:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004516 DeferredNumberTagI(LCodeGen* codegen,
4517 LNumberTagI* instr)
Ben Murdochb0fe1622011-05-05 13:52:32 +01004518 : LDeferredCode(codegen), instr_(instr) { }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004519 void Generate() OVERRIDE {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004520 codegen()->DoDeferredNumberTagIU(
4521 instr_, instr_->value(), instr_->temp(), SIGNED_INT32);
4522 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004523 LInstruction* instr() OVERRIDE { return instr_; }
4524
Ben Murdochb0fe1622011-05-05 13:52:32 +01004525 private:
4526 LNumberTagI* instr_;
4527 };
4528
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004529 LOperand* input = instr->value();
4530 DCHECK(input->IsRegister() && input->Equals(instr->result()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004531 Register reg = ToRegister(input);
4532
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004533 DeferredNumberTagI* deferred =
4534 new(zone()) DeferredNumberTagI(this, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004535 __ SmiTag(reg);
4536 __ j(overflow, deferred->entry());
4537 __ bind(deferred->exit());
4538}
4539
4540
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004541void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4542 class DeferredNumberTagU FINAL : public LDeferredCode {
4543 public:
4544 DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
4545 : LDeferredCode(codegen), instr_(instr) { }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004546 void Generate() OVERRIDE {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004547 codegen()->DoDeferredNumberTagIU(
4548 instr_, instr_->value(), instr_->temp(), UNSIGNED_INT32);
4549 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004550 LInstruction* instr() OVERRIDE { return instr_; }
4551
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004552 private:
4553 LNumberTagU* instr_;
4554 };
Ben Murdochb0fe1622011-05-05 13:52:32 +01004555
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004556 LOperand* input = instr->value();
4557 DCHECK(input->IsRegister() && input->Equals(instr->result()));
4558 Register reg = ToRegister(input);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004559
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004560 DeferredNumberTagU* deferred =
4561 new(zone()) DeferredNumberTagU(this, instr);
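  // Unsigned compare: ia32 smis have a 31-bit payload, so any uint32 above
  // Smi::kMaxValue (2^30 - 1) cannot be tagged and is boxed as a heap number
  // in the deferred code.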
4562 __ cmp(reg, Immediate(Smi::kMaxValue));
4563 __ j(above, deferred->entry());
4564 __ SmiTag(reg);
4565 __ bind(deferred->exit());
4566}
4567
4568
4569void LCodeGen::DoDeferredNumberTagIU(LInstruction* instr,
4570 LOperand* value,
4571 LOperand* temp,
4572 IntegerSignedness signedness) {
4573 Label done, slow;
4574 Register reg = ToRegister(value);
4575 Register tmp = ToRegister(temp);
4576 XMMRegister xmm_scratch = double_scratch0();
4577
4578 if (signedness == SIGNED_INT32) {
4579 // There was overflow, so bits 30 and 31 of the original integer
4580 // disagree. Try to allocate a heap number in new space and store
4581 // the value in there. If that fails, call the runtime system.
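    // SmiTag shifted the value left by one and dropped the original bit 31;
    // SmiUntag (an arithmetic shift right) restores the low 31 bits with
    // bit 31 now equal to bit 30, and since those two bits disagreed before
    // the overflow, flipping bit 31 with the xor recovers the original value.
    // Example: 0x7fffffff tags to 0xfffffffe; untagging gives 0xffffffff and
    // the xor with 0x80000000 yields 0x7fffffff again.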
4582 __ SmiUntag(reg);
4583 __ xor_(reg, 0x80000000);
4584 __ Cvtsi2sd(xmm_scratch, Operand(reg));
4585 } else {
4586 __ LoadUint32(xmm_scratch, reg);
4587 }
4588
Ben Murdochb0fe1622011-05-05 13:52:32 +01004589 if (FLAG_inline_new) {
4590 __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
Ben Murdoch257744e2011-11-30 15:57:28 +00004591 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004592 }
4593
4594 // Slow case: Call the runtime system to do the number allocation.
4595 __ bind(&slow);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004596 {
4597 // TODO(3095996): Put a valid pointer value in the stack slot where the
4598 // result register is stored, as this register is in the pointer map, but
4599 // contains an integer value.
4600 __ Move(reg, Immediate(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004601
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004602 // Preserve the value of all registers.
4603 PushSafepointRegistersScope scope(this);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004604
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004605 // NumberTagI and NumberTagD use the context from the frame, rather than
4606 // the environment's HContext or HInlinedContext value.
4607 // They only call Runtime::kAllocateHeapNumber.
4608 // The corresponding HChange instructions are added in a phase that does
4609 // not have easy access to the local context.
4610 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4611 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4612 RecordSafepointWithRegisters(
4613 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4614 __ StoreToSafepointRegisterSlot(reg, eax);
4615 }
4616
4617  // Done. Store the value from xmm_scratch into the allocated heap
Ben Murdochb0fe1622011-05-05 13:52:32 +01004618 // number.
4619 __ bind(&done);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004620 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), xmm_scratch);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004621}
4622
4623
4624void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004625 class DeferredNumberTagD FINAL : public LDeferredCode {
Ben Murdochb0fe1622011-05-05 13:52:32 +01004626 public:
4627 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
4628 : LDeferredCode(codegen), instr_(instr) { }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04004629 void Generate() OVERRIDE { codegen()->DoDeferredNumberTagD(instr_); }
4630 LInstruction* instr() OVERRIDE { return instr_; }
4631
Ben Murdochb0fe1622011-05-05 13:52:32 +01004632 private:
4633 LNumberTagD* instr_;
4634 };
4635
Ben Murdochb0fe1622011-05-05 13:52:32 +01004636 Register reg = ToRegister(instr->result());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004637
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004638 DeferredNumberTagD* deferred =
4639 new(zone()) DeferredNumberTagD(this, instr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004640 if (FLAG_inline_new) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004641 Register tmp = ToRegister(instr->temp());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004642 __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
4643 } else {
4644 __ jmp(deferred->entry());
4645 }
4646 __ bind(deferred->exit());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004647 XMMRegister input_reg = ToDoubleRegister(instr->value());
4648 __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004649}
4650
4651
4652void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
4653 // TODO(3095996): Get rid of this. For now, we need to make the
4654 // result register contain a valid pointer because it is already
4655 // contained in the register pointer map.
4656 Register reg = ToRegister(instr->result());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004657 __ Move(reg, Immediate(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004658
Ben Murdoch8b112d22011-06-08 16:22:53 +01004659 PushSafepointRegistersScope scope(this);
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00004660 // NumberTagI and NumberTagD use the context from the frame, rather than
4661 // the environment's HContext or HInlinedContext value.
4662 // They only call Runtime::kAllocateHeapNumber.
4663 // The corresponding HChange instructions are added in a phase that does
4664 // not have easy access to the local context.
4665 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4666 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
Ben Murdoch2b4ba112012-01-20 14:57:15 +00004667 RecordSafepointWithRegisters(
4668 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004669 __ StoreToSafepointRegisterSlot(reg, eax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004670}
4671
4672
4673void LCodeGen::DoSmiTag(LSmiTag* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004674 HChange* hchange = instr->hydrogen();
4675 Register input = ToRegister(instr->value());
4676 if (hchange->CheckFlag(HValue::kCanOverflow) &&
4677 hchange->value()->CheckFlag(HValue::kUint32)) {
4678 __ test(input, Immediate(0xc0000000));
4679 DeoptimizeIf(not_zero, instr, "overflow");
4680 }
4681 __ SmiTag(input);
4682 if (hchange->CheckFlag(HValue::kCanOverflow) &&
4683 !hchange->value()->CheckFlag(HValue::kUint32)) {
4684 DeoptimizeIf(overflow, instr, "overflow");
4685 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004686}
4687
4688
4689void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004690 LOperand* input = instr->value();
4691 Register result = ToRegister(input);
4692 DCHECK(input->IsRegister() && input->Equals(instr->result()));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004693 if (instr->needs_check()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004694 __ test(result, Immediate(kSmiTagMask));
4695 DeoptimizeIf(not_zero, instr, "not a Smi");
4696 } else {
4697 __ AssertSmi(result);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004698 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004699 __ SmiUntag(result);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004700}
4701
4702
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004703void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg,
4704 Register temp_reg, XMMRegister result_reg,
4705 NumberUntagDMode mode) {
4706 bool can_convert_undefined_to_nan =
4707 instr->hydrogen()->can_convert_undefined_to_nan();
4708 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
Ben Murdochb0fe1622011-05-05 13:52:32 +01004709
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004710 Label convert, load_smi, done;
Ben Murdochb0fe1622011-05-05 13:52:32 +01004711
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004712 if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
4713 // Smi check.
4714 __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
Ben Murdoch257744e2011-11-30 15:57:28 +00004715
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004716 // Heap number map check.
4717 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4718 factory()->heap_number_map());
4719 if (can_convert_undefined_to_nan) {
4720 __ j(not_equal, &convert, Label::kNear);
4721 } else {
4722 DeoptimizeIf(not_equal, instr, "not a heap number");
4723 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01004724
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004725 // Heap number to XMM conversion.
4726 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
4727
4728 if (deoptimize_on_minus_zero) {
4729 XMMRegister xmm_scratch = double_scratch0();
4730 __ xorps(xmm_scratch, xmm_scratch);
4731 __ ucomisd(result_reg, xmm_scratch);
4732 __ j(not_zero, &done, Label::kNear);
4733 __ movmskpd(temp_reg, result_reg);
4734 __ test_b(temp_reg, 1);
4735 DeoptimizeIf(not_zero, instr, "minus zero");
4736 }
Ben Murdoch257744e2011-11-30 15:57:28 +00004737 __ jmp(&done, Label::kNear);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004738
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004739 if (can_convert_undefined_to_nan) {
4740 __ bind(&convert);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004741
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004742 // Convert undefined (and hole) to NaN.
4743 __ cmp(input_reg, factory()->undefined_value());
4744 DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
4745
4746 ExternalReference nan =
4747 ExternalReference::address_of_canonical_non_hole_nan();
4748 __ movsd(result_reg, Operand::StaticVariable(nan));
4749 __ jmp(&done, Label::kNear);
4750 }
4751 } else {
4752 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
4753 }
4754
Ben Murdochb0fe1622011-05-05 13:52:32 +01004755 __ bind(&load_smi);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004756 // Smi to XMM conversion. Clobbering a temp is faster than re-tagging the
4757 // input register since we avoid dependencies.
4758 __ mov(temp_reg, input_reg);
4759  __ SmiUntag(temp_reg);  // Untag smi before converting to double.
4760 __ Cvtsi2sd(result_reg, Operand(temp_reg));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004761 __ bind(&done);
4762}
4763
4764
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004765void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr, Label* done) {
4766 Register input_reg = ToRegister(instr->value());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004767
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004768 // The input was optimistically untagged; revert it.
4769 STATIC_ASSERT(kSmiTagSize == 1);
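  // The lea recomputes (value << 1) + kHeapObjectTag, i.e. the original
  // tagged pointer that the optimistic SmiUntag shifted away.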
4770 __ lea(input_reg, Operand(input_reg, times_2, kHeapObjectTag));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004771
4772 if (instr->truncating()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004773 Label no_heap_number, check_bools, check_false;
4774
4775 // Heap number map check.
4776 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4777 factory()->heap_number_map());
4778 __ j(not_equal, &no_heap_number, Label::kNear);
4779 __ TruncateHeapNumberToI(input_reg, input_reg);
4780 __ jmp(done);
4781
4782 __ bind(&no_heap_number);
4783 // Check for Oddballs. Undefined/False is converted to zero and True to one
4784 // for truncating conversions.
Steve Block44f0eee2011-05-26 01:26:41 +01004785 __ cmp(input_reg, factory()->undefined_value());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004786 __ j(not_equal, &check_bools, Label::kNear);
4787 __ Move(input_reg, Immediate(0));
4788 __ jmp(done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004789
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004790 __ bind(&check_bools);
4791 __ cmp(input_reg, factory()->true_value());
4792 __ j(not_equal, &check_false, Label::kNear);
4793 __ Move(input_reg, Immediate(1));
4794 __ jmp(done);
Ben Murdochb0fe1622011-05-05 13:52:32 +01004795
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004796 __ bind(&check_false);
4797 __ cmp(input_reg, factory()->false_value());
4798 DeoptimizeIf(not_equal, instr, "not a heap number/undefined/true/false");
4799 __ Move(input_reg, Immediate(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01004800 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00004801 XMMRegister scratch = ToDoubleRegister(instr->temp());
4802 DCHECK(!scratch.is(xmm0));
4803 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
4804 isolate()->factory()->heap_number_map());
4805 DeoptimizeIf(not_equal, instr, "not a heap number");
4806 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2si(input_reg, Operand(xmm0));
    __ Cvtsi2sd(scratch, Operand(input_reg));
    __ ucomisd(xmm0, scratch);
    DeoptimizeIf(not_equal, instr, "lost precision");
    DeoptimizeIf(parity_even, instr, "NaN");
    if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) {
      __ test(input_reg, Operand(input_reg));
      __ j(not_zero, done);
      __ movmskpd(input_reg, xmm0);
      __ and_(input_reg, 1);
      DeoptimizeIf(not_zero, instr, "minus zero");
    }
  }
}


void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  class DeferredTaggedToI FINAL : public LDeferredCode {
   public:
    DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    void Generate() OVERRIDE { codegen()->DoDeferredTaggedToI(instr_, done()); }
    LInstruction* instr() OVERRIDE { return instr_; }

   private:
    LTaggedToI* instr_;
  };

  LOperand* input = instr->value();
  DCHECK(input->IsRegister());
  Register input_reg = ToRegister(input);
  DCHECK(input_reg.is(ToRegister(instr->result())));

  if (instr->hydrogen()->value()->representation().IsSmi()) {
    __ SmiUntag(input_reg);
  } else {
    DeferredTaggedToI* deferred =
        new(zone()) DeferredTaggedToI(this, instr);
    // Optimistically untag the input.
    // If the input is a HeapObject, SmiUntag will set the carry flag.
    STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
    __ SmiUntag(input_reg);
    // Branch to deferred code if the input was tagged.
    // The deferred code will take care of restoring the tag.
    __ j(carry, deferred->entry());
    __ bind(deferred->exit());
  }
}


void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->value();
  DCHECK(input->IsRegister());
  LOperand* temp = instr->temp();
  DCHECK(temp->IsRegister());
  LOperand* result = instr->result();
  DCHECK(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  Register temp_reg = ToRegister(temp);

  HValue* value = instr->hydrogen()->value();
  NumberUntagDMode mode = value->representation().IsSmi()
      ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;

  XMMRegister result_reg = ToDoubleRegister(result);
  EmitNumberUntagD(instr, input_reg, temp_reg, result_reg, mode);
}


void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->value();
  DCHECK(input->IsDoubleRegister());
  LOperand* result = instr->result();
  DCHECK(result->IsRegister());
  Register result_reg = ToRegister(result);

  if (instr->truncating()) {
    XMMRegister input_reg = ToDoubleRegister(input);
    __ TruncateDoubleToI(result_reg, input_reg);
  } else {
    Label lost_precision, is_nan, minus_zero, done;
    XMMRegister input_reg = ToDoubleRegister(input);
    XMMRegister xmm_scratch = double_scratch0();
    Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
    __ DoubleToI(result_reg, input_reg, xmm_scratch,
                 instr->hydrogen()->GetMinusZeroMode(), &lost_precision,
                 &is_nan, &minus_zero, dist);
    __ jmp(&done, dist);
    __ bind(&lost_precision);
    DeoptimizeIf(no_condition, instr, "lost precision");
    __ bind(&is_nan);
    DeoptimizeIf(no_condition, instr, "NaN");
    __ bind(&minus_zero);
    DeoptimizeIf(no_condition, instr, "minus zero");
    __ bind(&done);
  }
}


void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) {
  LOperand* input = instr->value();
  DCHECK(input->IsDoubleRegister());
  LOperand* result = instr->result();
  DCHECK(result->IsRegister());
  Register result_reg = ToRegister(result);

  Label lost_precision, is_nan, minus_zero, done;
  XMMRegister input_reg = ToDoubleRegister(input);
  XMMRegister xmm_scratch = double_scratch0();
  Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
  __ DoubleToI(result_reg, input_reg, xmm_scratch,
               instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan,
               &minus_zero, dist);
  __ jmp(&done, dist);
  __ bind(&lost_precision);
  DeoptimizeIf(no_condition, instr, "lost precision");
  __ bind(&is_nan);
  DeoptimizeIf(no_condition, instr, "NaN");
  __ bind(&minus_zero);
  DeoptimizeIf(no_condition, instr, "minus zero");
  __ bind(&done);
  __ SmiTag(result_reg);
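  // Smi-tagging doubles the untagged value; the deopt on the overflow flag
  // below catches results that do not fit in the 31-bit Smi payload.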
  DeoptimizeIf(overflow, instr, "overflow");
}


void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->value();
  __ test(ToOperand(input), Immediate(kSmiTagMask));
  DeoptimizeIf(not_zero, instr, "not a Smi");
}


void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  if (!instr->hydrogen()->value()->type().IsHeapObject()) {
    LOperand* input = instr->value();
    __ test(ToOperand(input), Immediate(kSmiTagMask));
    DeoptimizeIf(zero, instr, "Smi");
  }
}


void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->value());
  Register temp = ToRegister(instr->temp());

  __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
            static_cast<int8_t>(first));

    // If there is only one type in the interval, check for equality.
    if (first == last) {
      DeoptimizeIf(not_equal, instr, "wrong instance type");
    } else {
      DeoptimizeIf(below, instr, "wrong instance type");
      // Omit check for the last type.
      if (last != LAST_TYPE) {
        __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
                static_cast<int8_t>(last));
        DeoptimizeIf(above, instr, "wrong instance type");
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);

    if (base::bits::IsPowerOfTwo32(mask)) {
      DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag));
      __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask);
      DeoptimizeIf(tag == 0 ? not_zero : zero, instr, "wrong instance type");
    } else {
      __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
      __ and_(temp, mask);
      __ cmp(temp, tag);
      DeoptimizeIf(not_equal, instr, "wrong instance type");
    }
  }
}


void LCodeGen::DoCheckValue(LCheckValue* instr) {
  Handle<HeapObject> object = instr->hydrogen()->object().handle();
  if (instr->hydrogen()->object_in_new_space()) {
    Register reg = ToRegister(instr->value());
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    __ cmp(reg, Operand::ForCell(cell));
  } else {
    Operand operand = ToOperand(instr->value());
    __ cmp(operand, object);
  }
  DeoptimizeIf(not_equal, instr, "value mismatch");
}


void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
  {
    PushSafepointRegistersScope scope(this);
    __ push(object);
    __ xor_(esi, esi);
    __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);

    __ test(eax, Immediate(kSmiTagMask));
  }
  DeoptimizeIf(zero, instr, "instance migration failed");
}


void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
  class DeferredCheckMaps FINAL : public LDeferredCode {
   public:
    DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
        : LDeferredCode(codegen), instr_(instr), object_(object) {
      SetExit(check_maps());
    }
    void Generate() OVERRIDE {
      codegen()->DoDeferredInstanceMigration(instr_, object_);
    }
    Label* check_maps() { return &check_maps_; }
    LInstruction* instr() OVERRIDE { return instr_; }

   private:
    LCheckMaps* instr_;
    Label check_maps_;
    Register object_;
  };

  if (instr->hydrogen()->IsStabilityCheck()) {
    const UniqueSet<Map>* maps = instr->hydrogen()->maps();
    for (int i = 0; i < maps->size(); ++i) {
      AddStabilityDependency(maps->at(i).handle());
    }
    return;
  }

  LOperand* input = instr->value();
  DCHECK(input->IsRegister());
  Register reg = ToRegister(input);

  DeferredCheckMaps* deferred = NULL;
  if (instr->hydrogen()->HasMigrationTarget()) {
    deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
    __ bind(deferred->check_maps());
  }

  const UniqueSet<Map>* maps = instr->hydrogen()->maps();
  Label success;
  for (int i = 0; i < maps->size() - 1; i++) {
    Handle<Map> map = maps->at(i).handle();
    __ CompareMap(reg, map);
    __ j(equal, &success, Label::kNear);
  }

  Handle<Map> map = maps->at(maps->size() - 1).handle();
  __ CompareMap(reg, map);
  if (instr->hydrogen()->HasMigrationTarget()) {
    __ j(not_equal, deferred->entry());
  } else {
    DeoptimizeIf(not_equal, instr, "wrong map");
  }

  __ bind(&success);
}


void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
  XMMRegister xmm_scratch = double_scratch0();
  Register result_reg = ToRegister(instr->result());
  __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg);
}


void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  DCHECK(instr->unclamped()->Equals(instr->result()));
  Register value_reg = ToRegister(instr->result());
  __ ClampUint8(value_reg);
}


void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  DCHECK(instr->unclamped()->Equals(instr->result()));
  Register input_reg = ToRegister(instr->unclamped());
  XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
  XMMRegister xmm_scratch = double_scratch0();
  Label is_smi, done, heap_number;

  __ JumpIfSmi(input_reg, &is_smi);

  // Check for heap number
  __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  __ j(equal, &heap_number, Label::kNear);

  // Check for undefined. Undefined is converted to zero for clamping
  // conversions.
  __ cmp(input_reg, factory()->undefined_value());
  DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
  __ mov(input_reg, 0);
  __ jmp(&done, Label::kNear);

  // Heap number
  __ bind(&heap_number);
  __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg);
  __ jmp(&done, Label::kNear);

  // smi
  __ bind(&is_smi);
  __ SmiUntag(input_reg);
  __ ClampUint8(input_reg);
  __ bind(&done);
}


void LCodeGen::DoDoubleBits(LDoubleBits* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->value());
  Register result_reg = ToRegister(instr->result());
  if (instr->hydrogen()->bits() == HDoubleBits::HIGH) {
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope scope2(masm(), SSE4_1);
      __ pextrd(result_reg, value_reg, 1);
    } else {
      XMMRegister xmm_scratch = double_scratch0();
      __ pshufd(xmm_scratch, value_reg, 1);
      __ movd(result_reg, xmm_scratch);
    }
  } else {
    __ movd(result_reg, value_reg);
  }
}


void LCodeGen::DoConstructDouble(LConstructDouble* instr) {
  Register hi_reg = ToRegister(instr->hi());
  Register lo_reg = ToRegister(instr->lo());
  XMMRegister result_reg = ToDoubleRegister(instr->result());

  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope scope2(masm(), SSE4_1);
    __ movd(result_reg, lo_reg);
    __ pinsrd(result_reg, hi_reg, 1);
  } else {
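    // Without SSE4.1, assemble the double from its two 32-bit halves: move
    // the high word in, shift it into the upper lane, then OR in the low word.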
    XMMRegister xmm_scratch = double_scratch0();
    __ movd(result_reg, hi_reg);
    __ psllq(result_reg, 32);
    __ movd(xmm_scratch, lo_reg);
    __ orps(result_reg, xmm_scratch);
  }
}


void LCodeGen::DoAllocate(LAllocate* instr) {
  class DeferredAllocate FINAL : public LDeferredCode {
   public:
    DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    void Generate() OVERRIDE { codegen()->DoDeferredAllocate(instr_); }
    LInstruction* instr() OVERRIDE { return instr_; }

   private:
    LAllocate* instr_;
  };

  DeferredAllocate* deferred = new(zone()) DeferredAllocate(this, instr);

  Register result = ToRegister(instr->result());
  Register temp = ToRegister(instr->temp());

  // Allocate memory for the object.
  AllocationFlags flags = TAG_OBJECT;
  if (instr->hydrogen()->MustAllocateDoubleAligned()) {
    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
  }
  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
  }

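  // Constant sizes up to the regular-object limit are allocated inline;
  // anything larger is handed straight to the deferred runtime path.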
  if (instr->size()->IsConstantOperand()) {
    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
    if (size <= Page::kMaxRegularHeapObjectSize) {
      __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
    } else {
      __ jmp(deferred->entry());
    }
  } else {
    Register size = ToRegister(instr->size());
    __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
  }

  __ bind(deferred->exit());

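  // If requested, prefill the new object with one-pointer filler maps so the
  // heap never contains uninitialized words before the real values are stored.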
  if (instr->hydrogen()->MustPrefillWithFiller()) {
    if (instr->size()->IsConstantOperand()) {
      int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
      __ mov(temp, (size / kPointerSize) - 1);
    } else {
      temp = ToRegister(instr->size());
      __ shr(temp, kPointerSizeLog2);
      __ dec(temp);
    }
    Label loop;
    __ bind(&loop);
    __ mov(FieldOperand(result, temp, times_pointer_size, 0),
           isolate()->factory()->one_pointer_filler_map());
    __ dec(temp);
    __ j(not_zero, &loop);
  }
}


void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
  Register result = ToRegister(instr->result());

  // TODO(3095996): Get rid of this. For now, we need to make the
  // result register contain a valid pointer because it is already
  // contained in the register pointer map.
  __ Move(result, Immediate(Smi::FromInt(0)));

  PushSafepointRegistersScope scope(this);
  if (instr->size()->IsRegister()) {
    Register size = ToRegister(instr->size());
    DCHECK(!size.is(result));
    __ SmiTag(ToRegister(instr->size()));
    __ push(size);
  } else {
    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
    if (size >= 0 && size <= Smi::kMaxValue) {
      __ push(Immediate(Smi::FromInt(size)));
    } else {
      // We should never get here at runtime => abort
      __ int3();
      return;
    }
  }

  int flags = AllocateDoubleAlignFlag::encode(
      instr->hydrogen()->MustAllocateDoubleAligned());
  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
  } else {
    flags = AllocateTargetSpace::update(flags, NEW_SPACE);
  }
  __ push(Immediate(Smi::FromInt(flags)));

  CallRuntimeFromDeferred(
      Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
  __ StoreToSafepointRegisterSlot(result, eax);
}


void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  DCHECK(ToRegister(instr->value()).is(eax));
  __ push(eax);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}


void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  Label materialized;
  // Registers will be used as follows:
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  // esi = context.
  int literal_offset =
      FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
  __ LoadHeapObject(ecx, instr->hydrogen()->literals());
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ push(Immediate(instr->hydrogen()->pattern()));
  __ push(Immediate(instr->hydrogen()->flags()));
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated, Label::kNear);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
}


void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && instr->hydrogen()->has_no_literals()) {
    FastNewClosureStub stub(isolate(), instr->hydrogen()->strict_mode(),
                            instr->hydrogen()->kind());
    __ mov(ebx, Immediate(instr->hydrogen()->shared_info()));
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ push(esi);
    __ push(Immediate(instr->hydrogen()->shared_info()));
    __ push(Immediate(pretenure ? factory()->true_value()
                                : factory()->false_value()));
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}


void LCodeGen::DoTypeof(LTypeof* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  LOperand* input = instr->value();
  EmitPushTaggedOperand(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->value());
  Condition final_branch_condition = EmitTypeofIs(instr, input);
  if (final_branch_condition != no_condition) {
    EmitBranch(instr, final_branch_condition);
  }
}


Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) {
  Label* true_label = instr->TrueLabel(chunk_);
  Label* false_label = instr->FalseLabel(chunk_);
  Handle<String> type_name = instr->type_literal();
  int left_block = instr->TrueDestination(chunk_);
  int right_block = instr->FalseDestination(chunk_);
  int next_block = GetNextEmittedBlock();

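  // Branches to the block that will be emitted next can use the short jump
  // encoding.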
  Label::Distance true_distance = left_block == next_block ? Label::kNear
                                                            : Label::kFar;
  Label::Distance false_distance = right_block == next_block ? Label::kNear
                                                              : Label::kFar;
  Condition final_branch_condition = no_condition;
  if (String::Equals(type_name, factory()->number_string())) {
    __ JumpIfSmi(input, true_label, true_distance);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           factory()->heap_number_map());
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory()->string_string())) {
    __ JumpIfSmi(input, false_label, false_distance);
    __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
    __ j(above_equal, false_label, false_distance);
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else if (String::Equals(type_name, factory()->symbol_string())) {
    __ JumpIfSmi(input, false_label, false_distance);
    __ CmpObjectType(input, SYMBOL_TYPE, input);
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory()->boolean_string())) {
    __ cmp(input, factory()->true_value());
    __ j(equal, true_label, true_distance);
    __ cmp(input, factory()->false_value());
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory()->undefined_string())) {
    __ cmp(input, factory()->undefined_value());
    __ j(equal, true_label, true_distance);
    __ JumpIfSmi(input, false_label, false_distance);
    // Check for undetectable objects => true.
    __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = not_zero;

  } else if (String::Equals(type_name, factory()->function_string())) {
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ JumpIfSmi(input, false_label, false_distance);
    __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
    __ j(equal, true_label, true_distance);
    __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory()->object_string())) {
    __ JumpIfSmi(input, false_label, false_distance);
    __ cmp(input, factory()->null_value());
    __ j(equal, true_label, true_distance);
    __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
    __ j(below, false_label, false_distance);
    __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, false_label, false_distance);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(input, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    final_branch_condition = zero;

  } else {
    __ jmp(false_label, false_distance);
  }
  return final_branch_condition;
}


void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp = ToRegister(instr->temp());

  EmitIsConstructCall(temp);
  EmitBranch(instr, equal);
}


void LCodeGen::EmitIsConstructCall(Register temp) {
  // Get the frame pointer for the calling frame.
  __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker, Label::kNear);
  __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
}


void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
  if (!info()->IsStub()) {
    // Ensure that we have enough space after the previous lazy-bailout
    // instruction for patching the code here.
    int current_pc = masm()->pc_offset();
    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
      __ Nop(padding_size);
    }
  }
  last_lazy_deopt_pc_ = masm()->pc_offset();
}


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  last_lazy_deopt_pc_ = masm()->pc_offset();
  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}


void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  Deoptimizer::BailoutType type = instr->hydrogen()->type();
  // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the
  // needed return address), even though the implementation of LAZY and EAGER is
  // now identical. When LAZY is eventually completely folded into EAGER, remove
  // the special case below.
  if (info()->IsStub() && type == Deoptimizer::EAGER) {
    type = Deoptimizer::LAZY;
  }
  DeoptimizeIf(no_condition, instr, instr->hydrogen()->reason(), type);
}


void LCodeGen::DoDummy(LDummy* instr) {
  // Nothing to see here, move on!
}


void LCodeGen::DoDummyUse(LDummyUse* instr) {
  // Nothing to see here, move on!
}


void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(
      instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}


void LCodeGen::DoStackCheck(LStackCheck* instr) {
  class DeferredStackCheck FINAL : public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    void Generate() OVERRIDE { codegen()->DoDeferredStackCheck(instr_); }
    LInstruction* instr() OVERRIDE { return instr_; }

   private:
    LStackCheck* instr_;
  };

  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack-checks. We have to
  // prepare for lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &done, Label::kNear);

    DCHECK(instr->context()->IsRegister());
    DCHECK(ToRegister(instr->context()).is(esi));
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    __ bind(&done);
  } else {
    DCHECK(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.
    DeferredStackCheck* deferred_stack_check =
        new(zone()) DeferredStackCheck(this, instr);
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(below, deferred_stack_check->entry());
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // Don't record a deoptimization index for the safepoint here.
    // This will be done explicitly when emitting call and the safepoint in
    // the deferred code.
  }
}


void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // This is a pseudo-instruction that ensures that the environment here is
  // properly registered for deoptimization and records the assembler's PC
  // offset.
  LEnvironment* environment = instr->environment();

  // If the environment were already registered, we would have no way of
  // backpatching it with the spill slot operands.
  DCHECK(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);

  GenerateOsrPrologue();
}


void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
  DCHECK(ToRegister(instr->context()).is(esi));
  __ cmp(eax, isolate()->factory()->undefined_value());
  DeoptimizeIf(equal, instr, "undefined");

  __ cmp(eax, isolate()->factory()->null_value());
  DeoptimizeIf(equal, instr, "null");

  __ test(eax, Immediate(kSmiTagMask));
  DeoptimizeIf(zero, instr, "Smi");

  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  DeoptimizeIf(below_equal, instr, "wrong instance type");

  Label use_cache, call_runtime;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);

  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  DeoptimizeIf(not_equal, instr, "wrong map");
  __ bind(&use_cache);
}


void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
  Register map = ToRegister(instr->map());
  Register result = ToRegister(instr->result());
  Label load_cache, done;
  __ EnumLength(result, map);
  __ cmp(result, Immediate(Smi::FromInt(0)));
  __ j(not_equal, &load_cache, Label::kNear);
  __ mov(result, isolate()->factory()->empty_fixed_array());
  __ jmp(&done, Label::kNear);

  __ bind(&load_cache);
  __ LoadInstanceDescriptors(map, result);
  __ mov(result,
         FieldOperand(result, DescriptorArray::kEnumCacheOffset));
  __ mov(result,
         FieldOperand(result, FixedArray::SizeFor(instr->idx())));
  __ bind(&done);
  __ test(result, result);
  DeoptimizeIf(equal, instr, "no cache");
}


void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
  Register object = ToRegister(instr->value());
  __ cmp(ToRegister(instr->map()),
         FieldOperand(object, HeapObject::kMapOffset));
  DeoptimizeIf(not_equal, instr, "wrong map");
}


void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                           Register object,
                                           Register index) {
  PushSafepointRegistersScope scope(this);
  __ push(object);
  __ push(index);
  __ xor_(esi, esi);
  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
  __ StoreToSafepointRegisterSlot(object, eax);
}


void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  class DeferredLoadMutableDouble FINAL : public LDeferredCode {
   public:
    DeferredLoadMutableDouble(LCodeGen* codegen,
                              LLoadFieldByIndex* instr,
                              Register object,
                              Register index)
        : LDeferredCode(codegen),
          instr_(instr),
          object_(object),
          index_(index) {
    }
    void Generate() OVERRIDE {
      codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
    }
    LInstruction* instr() OVERRIDE { return instr_; }

   private:
    LLoadFieldByIndex* instr_;
    Register object_;
    Register index_;
  };

  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());

  DeferredLoadMutableDouble* deferred;
  deferred = new(zone()) DeferredLoadMutableDouble(
      this, instr, object, index);

  Label out_of_object, done;
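  // The field index is encoded as a Smi: its low bit selects the deferred
  // path for mutable heap-number (double) fields, and a negative value means
  // the field lives in the out-of-object properties backing store.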
  __ test(index, Immediate(Smi::FromInt(1)));
  __ j(not_zero, deferred->entry());

  __ sar(index, 1);

  __ cmp(index, Immediate(0));
  __ j(less, &out_of_object, Label::kNear);
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              JSObject::kHeaderSize));
  __ jmp(&done, Label::kNear);

  __ bind(&out_of_object);
  __ mov(object, FieldOperand(object, JSObject::kPropertiesOffset));
  __ neg(index);
  // Index is now equal to out of object property index plus 1.
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              FixedArray::kHeaderSize - kPointerSize));
  __ bind(deferred->exit());
  __ bind(&done);
}


void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
  Register context = ToRegister(instr->context());
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), context);
}


void LCodeGen::DoAllocateBlockContext(LAllocateBlockContext* instr) {
  Handle<ScopeInfo> scope_info = instr->scope_info();
  __ Push(scope_info);
  __ push(ToRegister(instr->function()));
  CallRuntime(Runtime::kPushBlockContext, 2, instr);
  RecordSafepoint(Safepoint::kNoLazyDeopt);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32