// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


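// Returns the AST id recorded for an IC call on the given property access,
// or AstNode::kNoNumber for synthetic properties, which carry no AST id of
// their own.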
static unsigned GetPropertyId(Property* property) {
  if (property->is_synthetic()) return AstNode::kNoNumber;
  return property->id();
}


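// Helper for emitting a jump over an inlined smi check that can later be
// patched in place (jc is rewritten to jz, jnc to jnz). EmitPatchInfo()
// records the distance back to the jump in the immediate of a test
// instruction so the patching code can locate the site.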
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm)
      : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
    ASSERT(is_int8(delta_to_patch_site));
    __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

  bool is_bound() const { return patch_site_.is_bound(); }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (ie, ourselves)
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info) {
  ASSERT(info_ == NULL);
  info_ = info;
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions need to replace the receiver with undefined
  // when called as functions (without an explicit receiver
  // object). rcx is zero for method calls and non-zero for function
  // calls.
  if (info->is_strict_mode()) {
    Label ok;
    __ testq(rcx, rcx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ movq(Operand(rsp, receiver_offset), kScratchRegister);
    __ bind(&ok);
  }

  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = scope()->num_stack_slots();
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(rdx);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is still in rdi.
    __ push(rdi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    function_in_register = false;
    // Context is returned in both rax and rsi. It replaces the context
    // passed to us. It's saved in the stack and kept live in rsi.
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);

    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movq(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(slot->index());
        __ movq(Operand(rsi, context_offset), rax);
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use a third register to avoid
        // clobbering rsi.
        __ movq(rcx, rsi);
        __ RecordWrite(rcx, context_offset, rax, rbx);
      }
    }
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(rdi);
    } else {
      __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // The receiver is just before the parameters on the caller's stack.
    int offset = scope()->num_parameters() * kPointerSize;
    __ lea(rdx,
           Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(rdx);
    __ Push(Smi::FromInt(scope()->num_parameters()));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub stub(
        is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
                         : ArgumentsAccessStub::NEW_NON_STRICT);
    __ CallStub(&stub);

    Variable* arguments_shadow = scope()->arguments_shadow();
    if (arguments_shadow != NULL) {
      // Store new arguments object in both "arguments" and ".arguments" slots.
      __ movq(rcx, rax);
      Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
    }
    Move(arguments->AsSlot(), rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);
  } else {
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        EmitDeclaration(scope()->function(), Variable::CONST, NULL);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
      Label ok;
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, &ok, Label::kNear);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
  Comment cmnt(masm_, "[ Stack check");
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok, Label::kNear);
  StackCheckStub stub;
  __ CallStub(&stub);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  // Loop stack checks can be patched to perform on-stack replacement. In
  // order to decide whether or not to perform OSR we embed the loop depth
  // in a test instruction after the call so we can extract it from the OSR
  // builtin.
  ASSERT(loop_depth() > 0);
  __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(rax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ movq(rsp, rbp);
    __ pop(rbp);

    int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, rcx);

#ifdef ENABLE_DEBUGGER_SUPPORT
    // Add padding that will be overwritten by a debugger breakpoint. We
    // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
    // (3 + 1 + 3).
    const int kPadding = Assembler::kJSReturnSequenceLength - 7;
    for (int i = 0; i < kPadding; ++i) {
      masm_->int3();
    }
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  __ movq(result_register(), slot_operand);
}


void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  __ push(slot_operand);
}


void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
  codegen()->Move(result_register(), slot);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Move(result_register(), lit);
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  __ Push(lit);
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movq(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


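// Convert the value in the result register to a boolean with the ToBoolean
// stub and branch to if_true or if_false on the result (the stub returns
// nonzero for true).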
void FullCodeGenerator::DoTest(Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub;
  __ push(result_register());
  __ CallStub(&stub);
  __ testq(rax, rax);
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


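// Emit a conditional branch on cc to if_true and if_false, omitting the jump
// to whichever label equals fall_through.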
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


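// Return a memory operand addressing the given slot: rbp-relative for
// parameters and locals, context-relative (via scratch) for context slots.
// Lookup slots are not handled here.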
MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
  switch (slot->type()) {
    case Slot::PARAMETER:
    case Slot::LOCAL:
      return Operand(rbp, SlotOffset(slot));
    case Slot::CONTEXT: {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
      return ContextOperand(scratch, slot->index());
    }
    case Slot::LOOKUP:
      UNREACHABLE();
  }
  UNREACHABLE();
  return Operand(rax, 0);
}


void FullCodeGenerator::Move(Register destination, Slot* source) {
  MemOperand location = EmitSlotSearch(source, destination);
  __ movq(destination, location);
}


void FullCodeGenerator::Move(Slot* dst,
                             Register src,
                             Register scratch1,
                             Register scratch2) {
  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
  ASSERT(!scratch1.is(src) && !scratch2.is(src));
  MemOperand location = EmitSlotSearch(dst, scratch1);
  __ movq(location, src);
  // Emit the write barrier code if the location is in the heap.
  if (dst->type() == Slot::CONTEXT) {
    int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
    __ RecordWrite(scratch1, offset, src, scratch2);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);

  ForwardBailoutStack* current = forward_bailout_stack_;
  while (current != NULL) {
    PrepareForBailout(current->expr(), state);
    current = current->parent();
  }

  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDeclaration(Variable* variable,
                                        Variable::Mode mode,
                                        FunctionLiteral* function) {
  Comment cmnt(masm_, "[ Declaration");
  ASSERT(variable != NULL);  // Must have been resolved.
  Slot* slot = variable->AsSlot();
  Property* prop = variable->AsProperty();

  if (slot != NULL) {
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (mode == Variable::CONST) {
          __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
          __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ movq(Operand(rbp, SlotOffset(slot)), result_register());
        }
        break;

      case Slot::CONTEXT:
        // We bypass the general EmitSlotSearch because we know more about
        // this specific context.

        // The variable in the decl always resides in the current context.
        ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
        if (FLAG_debug_code) {
          // Check if we have the correct context pointer.
          __ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
          __ cmpq(rbx, rsi);
          __ Check(equal, "Unexpected declaration in current context.");
        }
        if (mode == Variable::CONST) {
          __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
          __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
          // No write barrier since the hole value is in old space.
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ movq(ContextOperand(rsi, slot->index()), result_register());
          int offset = Context::SlotOffset(slot->index());
          __ movq(rbx, rsi);
          __ RecordWrite(rbx, offset, result_register(), rcx);
        }
        break;

      case Slot::LOOKUP: {
        __ push(rsi);
        __ Push(variable->name());
        // Declaration nodes are always introduced in one of two modes.
        ASSERT(mode == Variable::VAR || mode == Variable::CONST);
        PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
        __ Push(Smi::FromInt(attr));
        // Push initial value, if any.
        // Note: For variables we must not push an initial value (such as
        // 'undefined') because we may have a (legal) redeclaration and we
        // must not destroy the current value.
        if (mode == Variable::CONST) {
          __ PushRoot(Heap::kTheHoleValueRootIndex);
        } else if (function != NULL) {
          VisitForStackValue(function);
        } else {
          __ Push(Smi::FromInt(0));  // no initial value!
        }
        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
        break;
      }
    }

  } else if (prop != NULL) {
    // A const declaration aliasing a parameter is an illegal redeclaration.
    ASSERT(mode != Variable::CONST);
    if (function != NULL) {
      // We are declaring a function that rewrites to a property.
      // Use (keyed) IC to set the initial value. We cannot visit the
      // rewrite because it's shared and we risk recording duplicate AST
      // IDs for bailouts from optimized code.
      ASSERT(prop->obj()->AsVariableProxy() != NULL);
      { AccumulatorValueContext for_object(this);
        EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
      }
      __ push(rax);
      VisitForAccumulatorValue(function);
      __ pop(rdx);
      ASSERT(prop->key()->AsLiteral() != NULL &&
             prop->key()->AsLiteral()->handle()->IsSmi());
      __ Move(rcx, prop->key()->AsLiteral()->handle());

      Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
          : isolate()->builtins()->KeyedStoreIC_Initialize();
      EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
    }
  }
}


void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
  EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(rsi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(is_eval() ? 1 : 0));
  __ Push(Smi::FromInt(strict_mode_flag()));
  __ CallRuntime(Runtime::kDeclareGlobals, 4);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movq(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movq(rcx, rdx);
      __ or_(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpq(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    EmitCallIC(ic, &patch_site, clause->CompareId());

    __ testq(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_target());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_target());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpq(rax, null_value);
  __ j(equal, &exit);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert);
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(rax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(rax);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  Label next, call_runtime;
  Register empty_fixed_array_value = r8;
  __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Register empty_descriptor_array_value = r9;
  __ LoadRoot(empty_descriptor_array_value,
              Heap::kEmptyDescriptorArrayRootIndex);
  __ movq(rcx, rax);
  __ bind(&next);

  // Check that there are no elements. Register rcx contains the
  // current JS object we've reached through the prototype chain.
  __ cmpq(empty_fixed_array_value,
          FieldOperand(rcx, JSObject::kElementsOffset));
  __ j(not_equal, &call_runtime);

  // Check that instance descriptors are not empty so that we can
  // check for an enum cache. Leave the map in rbx for the subsequent
  // prototype load.
  __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
  __ JumpIfSmi(rdx, &call_runtime);

  // Check that there is an enum cache in the non-empty instance
  // descriptors (rdx). This is the case if the next enumeration
  // index field does not contain a smi.
  __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
  __ JumpIfSmi(rdx, &call_runtime);

  // For all objects but the receiver, check that the cache is empty.
  Label check_prototype;
  __ cmpq(rcx, rax);
  __ j(equal, &check_prototype, Label::kNear);
  __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmpq(rdx, empty_fixed_array_value);
  __ j(not_equal, &call_runtime);

  // Load the prototype from the map and loop if non-null.
  __ bind(&check_prototype);
  __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  __ cmpq(rcx, null_value);
  __ j(not_equal, &next);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array, Label::kNear);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(rax, rcx);
  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
  __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Setup the four remaining stack slots.
  __ push(rax);  // Map.
  __ push(rdx);  // Enumeration cache.
  __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
  __ push(rax);  // Enumeration cache length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  // We got a fixed array in register rax. Iterate through that.
  __ bind(&fixed_array);
  __ Push(Smi::FromInt(0));  // Map (0) - force slow check.
  __ push(rax);
  __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ push(rax);  // Fixed array length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  __ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_target());

  // Get the current entry of the array into register rbx.
  __ movq(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case into register rdx.
  __ movq(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  Label update_each;
  __ movq(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(rcx);  // Enumerable.
  __ push(rbx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ Cmp(rax, Smi::FromInt(0));
  __ j(equal, loop_statement.continue_target());
  __ movq(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movq(result_register(), rbx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->AssignmentId());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_target());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));

  EmitStackCheck(stmt);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_target());
  __ addq(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ Push(info);
    __ CallStub(&stub);
  } else {
    __ push(rsi);
    __ Push(info);
    __ Push(pretenure
            ? isolate()->factory()->true_value()
            : isolate()->factory()->false_value());
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var());
}


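// Load a global variable for a slot that may be shadowed by eval-introduced
// context extensions. Every relevant context extension object is checked to
// be NULL; if any is present we jump to the slow path, otherwise the value
// is loaded with a global load IC.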
void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ movq(temp, context);
    }
    // Load map for comparison into register, outside loop.
    __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
    __ bind(&next);
    // Terminate at global context.
    __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
    __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ movq(rax, GlobalObjectOperand());
  __ Move(rcx, slot->var()->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  EmitCallIC(ic, mode, AstNode::kNoNumber);
}


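// Return a context-relative operand for the given context slot after
// verifying that every context on the way to the slot's scope has a NULL
// extension object; otherwise jump to the slow path.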
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Label* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
                Immediate(0));
        __ j(not_equal, slow);
      }
      __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an rsi-based operand (the write barrier cannot be allowed to
  // destroy the rsi register).
  return ContextOperand(context, slot->index());
}


1171void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1172 Slot* slot,
1173 TypeofState typeof_state,
1174 Label* slow,
1175 Label* done) {
1176 // Generate fast-case code for variables that might be shadowed by
1177 // eval-introduced variables. Eval is used a lot without
1178 // introducing variables. In those cases, we do not want to
1179 // perform a runtime call for all variables in the scope
1180 // containing the eval.
1181 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
1182 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
1183 __ jmp(done);
1184 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001185 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Steve Block59151502010-09-22 15:07:15 +01001186 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
1187 if (potential_slot != NULL) {
1188 // Generate fast case for locals that rewrite to slots.
1189 __ movq(rax,
1190 ContextSlotOperandCheckExtensions(potential_slot, slow));
1191 if (potential_slot->var()->mode() == Variable::CONST) {
1192 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1193 __ j(not_equal, done);
1194 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1195 }
1196 __ jmp(done);
1197 } else if (rewrite != NULL) {
1198 // Generate fast case for parameters that rewrite to arguments[i] accesses.
1199 Property* property = rewrite->AsProperty();
1200 if (property != NULL) {
1201 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1202 Literal* key_literal = property->key()->AsLiteral();
1203 if (obj_proxy != NULL &&
1204 key_literal != NULL &&
1205 obj_proxy->IsArguments() &&
1206 key_literal->handle()->IsSmi()) {
1207 // Load arguments object if there are no eval-introduced
1208 // variables. Then load the argument from the arguments
1209 // object using a keyed load.
1210 __ movq(rdx,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001211 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
Steve Block59151502010-09-22 15:07:15 +01001212 slow));
1213 __ Move(rax, key_literal->handle());
Steve Block44f0eee2011-05-26 01:26:41 +01001214 Handle<Code> ic =
1215 isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001216 EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
Steve Block59151502010-09-22 15:07:15 +01001217 __ jmp(done);
1218 }
1219 }
1220 }
1221 }
1222}
1223
1224
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001225void FullCodeGenerator::EmitVariableLoad(Variable* var) {
Leon Clarked91b9f72010-01-27 17:25:45 +00001226 // Four cases: non-this global variables, lookup slots, all other
1227 // types of slots, and parameters that rewrite to explicit property
1228 // accesses on the arguments object.
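  // Roughly, the cases correspond to loads like:
  //   x;                 // non-this global    -> load IC
  //   with (o) { x; }    // lookup slot        -> runtime lookup with fast path
  //   var y; y;          // stack/context slot -> direct slot access
  //   a parameter rewritten to arguments[i]    -> keyed load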
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001229 Slot* slot = var->AsSlot();
Leon Clarked91b9f72010-01-27 17:25:45 +00001230 Property* property = var->AsProperty();
1231
1232 if (var->is_global() && !var->is_this()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001233 Comment cmnt(masm_, "Global variable");
1234 // Use inline caching. Variable name is passed in rcx and the global
1235 // object in rax.
Leon Clarkee46be812010-01-19 14:06:41 +00001236 __ Move(rcx, var->name());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001237 __ movq(rax, GlobalObjectOperand());
Steve Block44f0eee2011-05-26 01:26:41 +01001238 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001239 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001240 context()->Plug(rax);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00001241
Leon Clarked91b9f72010-01-27 17:25:45 +00001242 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
Steve Block59151502010-09-22 15:07:15 +01001243 Label done, slow;
1244
1245 // Generate code for loading from variables potentially shadowed
1246 // by eval-introduced variables.
1247 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
1248
1249 __ bind(&slow);
Leon Clarked91b9f72010-01-27 17:25:45 +00001250 Comment cmnt(masm_, "Lookup slot");
1251 __ push(rsi); // Context.
1252 __ Push(var->name());
1253 __ CallRuntime(Runtime::kLoadContextSlot, 2);
Steve Block59151502010-09-22 15:07:15 +01001254 __ bind(&done);
1255
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001256 context()->Plug(rax);
Leon Clarked91b9f72010-01-27 17:25:45 +00001257
1258 } else if (slot != NULL) {
1259 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1260 ? "Context slot"
1261 : "Stack slot");
Leon Clarkef7060e22010-06-03 12:02:55 +01001262 if (var->mode() == Variable::CONST) {
1263 // Constants may be the hole value if they have not been initialized.
1264 // Unhole them.
Ben Murdoch257744e2011-11-30 15:57:28 +00001265 Label done;
Leon Clarkef7060e22010-06-03 12:02:55 +01001266 MemOperand slot_operand = EmitSlotSearch(slot, rax);
1267 __ movq(rax, slot_operand);
1268 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
Ben Murdoch257744e2011-11-30 15:57:28 +00001269 __ j(not_equal, &done, Label::kNear);
Leon Clarkef7060e22010-06-03 12:02:55 +01001270 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1271 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001272 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01001273 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001274 context()->Plug(slot);
Leon Clarkef7060e22010-06-03 12:02:55 +01001275 }
Leon Clarked91b9f72010-01-27 17:25:45 +00001276
1277 } else {
1278 Comment cmnt(masm_, "Rewritten parameter");
1279 ASSERT_NOT_NULL(property);
1280 // Rewritten parameter accesses are of the form "slot[literal]".
Steve Blockd0582a62009-12-15 09:54:21 +00001281
Leon Clarkee46be812010-01-19 14:06:41 +00001282 // Assert that the object is in a slot.
1283 Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
1284 ASSERT_NOT_NULL(object_var);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001285 Slot* object_slot = object_var->AsSlot();
Steve Blockd0582a62009-12-15 09:54:21 +00001286 ASSERT_NOT_NULL(object_slot);
1287
1288 // Load the object.
Leon Clarkee46be812010-01-19 14:06:41 +00001289 MemOperand object_loc = EmitSlotSearch(object_slot, rax);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001290 __ movq(rdx, object_loc);
Steve Blockd0582a62009-12-15 09:54:21 +00001291
Leon Clarkee46be812010-01-19 14:06:41 +00001292 // Assert that the key is a smi.
Steve Blockd0582a62009-12-15 09:54:21 +00001293 Literal* key_literal = property->key()->AsLiteral();
1294 ASSERT_NOT_NULL(key_literal);
1295 ASSERT(key_literal->handle()->IsSmi());
1296
1297 // Load the key.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001298 __ Move(rax, key_literal->handle());
Steve Blockd0582a62009-12-15 09:54:21 +00001299
Leon Clarkee46be812010-01-19 14:06:41 +00001300 // Do a keyed property load.
Steve Block44f0eee2011-05-26 01:26:41 +01001301 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001302 EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001303 context()->Plug(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00001304 }
1305}
1306
1307
Leon Clarked91b9f72010-01-27 17:25:45 +00001308void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00001309 Comment cmnt(masm_, "[ RegExpLiteral");
Ben Murdochbb769b22010-08-11 14:56:33 +01001310 Label materialized;
Steve Block3ce2e202009-11-05 08:53:23 +00001311 // Registers will be used as follows:
1312 // rdi = JS function.
Ben Murdochbb769b22010-08-11 14:56:33 +01001313 // rcx = literals array.
1314 // rbx = regexp literal.
1315 // rax = regexp literal clone.
Steve Block3ce2e202009-11-05 08:53:23 +00001316 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
Ben Murdochbb769b22010-08-11 14:56:33 +01001317 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00001318 int literal_offset =
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001319 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
Ben Murdochbb769b22010-08-11 14:56:33 +01001320 __ movq(rbx, FieldOperand(rcx, literal_offset));
1321 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1322 __ j(not_equal, &materialized);
1323
Steve Block3ce2e202009-11-05 08:53:23 +00001324 // Create regexp literal using runtime function
1325 // Result will be in rax.
Ben Murdochbb769b22010-08-11 14:56:33 +01001326 __ push(rcx);
Steve Block3ce2e202009-11-05 08:53:23 +00001327 __ Push(Smi::FromInt(expr->literal_index()));
1328 __ Push(expr->pattern());
1329 __ Push(expr->flags());
1330 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
Ben Murdochbb769b22010-08-11 14:56:33 +01001331 __ movq(rbx, rax);
1332
1333 __ bind(&materialized);
1334 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1335 Label allocated, runtime_allocate;
1336 __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1337 __ jmp(&allocated);
1338
1339 __ bind(&runtime_allocate);
1340 __ push(rbx);
1341 __ Push(Smi::FromInt(size));
1342 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1343 __ pop(rbx);
1344
1345 __ bind(&allocated);
1346 // Copy the content into the newly allocated memory.
1347 // (Unroll copy loop once for better throughput).
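  // Each iteration copies two pointer-sized fields; the check below
  // copies a trailing field when the size is not a multiple of
  // 2 * kPointerSize.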
1348 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1349 __ movq(rdx, FieldOperand(rbx, i));
1350 __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
1351 __ movq(FieldOperand(rax, i), rdx);
1352 __ movq(FieldOperand(rax, i + kPointerSize), rcx);
1353 }
1354 if ((size % (2 * kPointerSize)) != 0) {
1355 __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
1356 __ movq(FieldOperand(rax, size - kPointerSize), rdx);
1357 }
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001358 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00001359}
1360
1361
Leon Clarked91b9f72010-01-27 17:25:45 +00001362void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00001363 Comment cmnt(masm_, "[ ObjectLiteral");
Steve Blockd0582a62009-12-15 09:54:21 +00001364 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00001365 __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00001366 __ Push(Smi::FromInt(expr->literal_index()));
Steve Blockd0582a62009-12-15 09:54:21 +00001367 __ Push(expr->constant_properties());
Steve Block44f0eee2011-05-26 01:26:41 +01001368 int flags = expr->fast_elements()
1369 ? ObjectLiteral::kFastElements
1370 : ObjectLiteral::kNoFlags;
1371 flags |= expr->has_function()
1372 ? ObjectLiteral::kHasFunction
1373 : ObjectLiteral::kNoFlags;
1374 __ Push(Smi::FromInt(flags));
Leon Clarkee46be812010-01-19 14:06:41 +00001375 if (expr->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01001376 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
Steve Block3ce2e202009-11-05 08:53:23 +00001377 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01001378 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blockd0582a62009-12-15 09:54:21 +00001379 }
1380
Leon Clarkee46be812010-01-19 14:06:41 +00001381 // If result_saved is true the result is on top of the stack. If
1382 // result_saved is false the result is in rax.
Steve Blockd0582a62009-12-15 09:54:21 +00001383 bool result_saved = false;
1384
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08001385 // Mark all computed expressions that are bound to a key that
1386 // is shadowed by a later occurrence of the same key. For the
1387 // marked expressions, no store code is emitted.
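  // For example, in { x: f(), x: g() } only the store for the second
  // 'x' is emitted; f() is still evaluated for its side effects.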
1388 expr->CalculateEmitStore();
1389
Steve Blockd0582a62009-12-15 09:54:21 +00001390 for (int i = 0; i < expr->properties()->length(); i++) {
1391 ObjectLiteral::Property* property = expr->properties()->at(i);
1392 if (property->IsCompileTimeValue()) continue;
1393
1394 Literal* key = property->key();
1395 Expression* value = property->value();
1396 if (!result_saved) {
1397 __ push(rax); // Save result on the stack
1398 result_saved = true;
1399 }
1400 switch (property->kind()) {
Leon Clarkee46be812010-01-19 14:06:41 +00001401 case ObjectLiteral::Property::CONSTANT:
1402 UNREACHABLE();
1403 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
Steve Blockd0582a62009-12-15 09:54:21 +00001404 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
Leon Clarkee46be812010-01-19 14:06:41 +00001405 // Fall through.
Steve Blockd0582a62009-12-15 09:54:21 +00001406 case ObjectLiteral::Property::COMPUTED:
1407 if (key->handle()->IsSymbol()) {
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08001408 if (property->emit_store()) {
Steve Block053d10c2011-06-13 19:13:29 +01001409 VisitForAccumulatorValue(value);
1410 __ Move(rcx, key->handle());
1411 __ movq(rdx, Operand(rsp, 0));
1412 Handle<Code> ic = is_strict_mode()
1413 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1414 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001415 EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
Ben Murdoch086aeea2011-05-13 15:57:08 +01001416 PrepareForBailoutForId(key->id(), NO_REGISTERS);
Steve Block053d10c2011-06-13 19:13:29 +01001417 } else {
1418 VisitForEffect(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08001419 }
Steve Blockd0582a62009-12-15 09:54:21 +00001420 break;
1421 }
Leon Clarkee46be812010-01-19 14:06:41 +00001422 // Fall through.
Steve Blockd0582a62009-12-15 09:54:21 +00001423 case ObjectLiteral::Property::PROTOTYPE:
Leon Clarkee46be812010-01-19 14:06:41 +00001424 __ push(Operand(rsp, 0)); // Duplicate receiver.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001425 VisitForStackValue(key);
1426 VisitForStackValue(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08001427 if (property->emit_store()) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001428 __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1429 __ CallRuntime(Runtime::kSetProperty, 4);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08001430 } else {
1431 __ Drop(3);
1432 }
Steve Blockd0582a62009-12-15 09:54:21 +00001433 break;
Leon Clarkee46be812010-01-19 14:06:41 +00001434 case ObjectLiteral::Property::SETTER:
Steve Blockd0582a62009-12-15 09:54:21 +00001435 case ObjectLiteral::Property::GETTER:
Leon Clarkee46be812010-01-19 14:06:41 +00001436 __ push(Operand(rsp, 0)); // Duplicate receiver.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001437 VisitForStackValue(key);
Steve Blockd0582a62009-12-15 09:54:21 +00001438 __ Push(property->kind() == ObjectLiteral::Property::SETTER ?
1439 Smi::FromInt(1) :
1440 Smi::FromInt(0));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001441 VisitForStackValue(value);
Steve Blockd0582a62009-12-15 09:54:21 +00001442 __ CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blockd0582a62009-12-15 09:54:21 +00001443 break;
Steve Blockd0582a62009-12-15 09:54:21 +00001444 }
1445 }
Leon Clarkee46be812010-01-19 14:06:41 +00001446
Steve Block44f0eee2011-05-26 01:26:41 +01001447 if (expr->has_function()) {
1448 ASSERT(result_saved);
1449 __ push(Operand(rsp, 0));
1450 __ CallRuntime(Runtime::kToFastProperties, 1);
1451 }
1452
Leon Clarkee46be812010-01-19 14:06:41 +00001453 if (result_saved) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001454 context()->PlugTOS();
Leon Clarkee46be812010-01-19 14:06:41 +00001455 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001456 context()->Plug(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00001457 }
1458}
1459
1460
Leon Clarked91b9f72010-01-27 17:25:45 +00001461void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Steve Block3ce2e202009-11-05 08:53:23 +00001462 Comment cmnt(masm_, "[ ArrayLiteral");
Leon Clarkef7060e22010-06-03 12:02:55 +01001463
1464 ZoneList<Expression*>* subexprs = expr->values();
1465 int length = subexprs->length();
1466
Steve Block3ce2e202009-11-05 08:53:23 +00001467 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00001468 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00001469 __ Push(Smi::FromInt(expr->literal_index()));
Leon Clarkee46be812010-01-19 14:06:41 +00001470 __ Push(expr->constant_elements());
Steve Block44f0eee2011-05-26 01:26:41 +01001471 if (expr->constant_elements()->map() ==
1472 isolate()->heap()->fixed_cow_array_map()) {
Iain Merrick75681382010-08-19 15:07:18 +01001473 FastCloneShallowArrayStub stub(
1474 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1475 __ CallStub(&stub);
Steve Block44f0eee2011-05-26 01:26:41 +01001476 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
Iain Merrick75681382010-08-19 15:07:18 +01001477 } else if (expr->depth() > 1) {
Leon Clarkee46be812010-01-19 14:06:41 +00001478 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
Iain Merrick75681382010-08-19 15:07:18 +01001479 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00001480 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Leon Clarkef7060e22010-06-03 12:02:55 +01001481 } else {
Iain Merrick75681382010-08-19 15:07:18 +01001482 FastCloneShallowArrayStub stub(
1483 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
Leon Clarkef7060e22010-06-03 12:02:55 +01001484 __ CallStub(&stub);
Steve Block3ce2e202009-11-05 08:53:23 +00001485 }
1486
1487 bool result_saved = false; // Is the result saved to the stack?
1488
1489 // Emit code to evaluate all the non-constant subexpressions and to store
1490 // them into the newly cloned array.
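  // For example, in [1, 2, foo()] only the last element needs code here;
  // the constant elements are already present in the boilerplate clone.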
Leon Clarkef7060e22010-06-03 12:02:55 +01001491 for (int i = 0; i < length; i++) {
Steve Block3ce2e202009-11-05 08:53:23 +00001492 Expression* subexpr = subexprs->at(i);
1493 // If the subexpression is a literal or a simple materialized literal it
1494 // is already set in the cloned array.
1495 if (subexpr->AsLiteral() != NULL ||
1496 CompileTimeValue::IsCompileTimeValue(subexpr)) {
1497 continue;
1498 }
1499
1500 if (!result_saved) {
1501 __ push(rax);
1502 result_saved = true;
1503 }
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001504 VisitForAccumulatorValue(subexpr);
Steve Block3ce2e202009-11-05 08:53:23 +00001505
1506 // Store the subexpression value in the array's elements.
Steve Block3ce2e202009-11-05 08:53:23 +00001507 __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
1508 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1509 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
Leon Clarkee46be812010-01-19 14:06:41 +00001510 __ movq(FieldOperand(rbx, offset), result_register());
Steve Block3ce2e202009-11-05 08:53:23 +00001511
1512 // Update the write barrier for the array store.
Leon Clarkee46be812010-01-19 14:06:41 +00001513 __ RecordWrite(rbx, offset, result_register(), rcx);
Ben Murdoch086aeea2011-05-13 15:57:08 +01001514
1515 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
Steve Block3ce2e202009-11-05 08:53:23 +00001516 }
1517
Leon Clarkee46be812010-01-19 14:06:41 +00001518 if (result_saved) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001519 context()->PlugTOS();
Leon Clarkee46be812010-01-19 14:06:41 +00001520 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001521 context()->Plug(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00001522 }
1523}
1524
1525
Andrei Popescu402d9372010-02-26 13:31:12 +00001526void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1527 Comment cmnt(masm_, "[ Assignment");
Leon Clarkef7060e22010-06-03 12:02:55 +01001528 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1529 // on the left-hand side.
1530 if (!expr->target()->IsValidLeftHandSide()) {
1531 VisitForEffect(expr->target());
1532 return;
1533 }
1534
Andrei Popescu402d9372010-02-26 13:31:12 +00001535 // Left-hand side can only be a property, a global or a (parameter or local)
1536 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1537 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1538 LhsKind assign_type = VARIABLE;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001539 Property* property = expr->target()->AsProperty();
1540 if (property != NULL) {
1541 assign_type = (property->key()->IsPropertyName())
1542 ? NAMED_PROPERTY
1543 : KEYED_PROPERTY;
Andrei Popescu402d9372010-02-26 13:31:12 +00001544 }
1545
1546 // Evaluate LHS expression.
1547 switch (assign_type) {
1548 case VARIABLE:
1549 // Nothing to do here.
1550 break;
1551 case NAMED_PROPERTY:
1552 if (expr->is_compound()) {
1553 // We need the receiver both on the stack and in the accumulator.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001554 VisitForAccumulatorValue(property->obj());
Andrei Popescu402d9372010-02-26 13:31:12 +00001555 __ push(result_register());
1556 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001557 VisitForStackValue(property->obj());
Andrei Popescu402d9372010-02-26 13:31:12 +00001558 }
1559 break;
Ben Murdoch086aeea2011-05-13 15:57:08 +01001560 case KEYED_PROPERTY: {
Leon Clarkef7060e22010-06-03 12:02:55 +01001561 if (expr->is_compound()) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001562 if (property->is_arguments_access()) {
1563 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1564 MemOperand slot_operand =
1565 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1566 __ push(slot_operand);
1567 __ Move(rax, property->key()->AsLiteral()->handle());
1568 } else {
1569 VisitForStackValue(property->obj());
1570 VisitForAccumulatorValue(property->key());
1571 }
Leon Clarkef7060e22010-06-03 12:02:55 +01001572 __ movq(rdx, Operand(rsp, 0));
1573 __ push(rax);
1574 } else {
Ben Murdoch086aeea2011-05-13 15:57:08 +01001575 if (property->is_arguments_access()) {
1576 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1577 MemOperand slot_operand =
1578 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1579 __ push(slot_operand);
1580 __ Push(property->key()->AsLiteral()->handle());
1581 } else {
1582 VisitForStackValue(property->obj());
1583 VisitForStackValue(property->key());
1584 }
Leon Clarkef7060e22010-06-03 12:02:55 +01001585 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001586 break;
Ben Murdoch086aeea2011-05-13 15:57:08 +01001587 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001588 }
1589
Ben Murdoch8b112d22011-06-08 16:22:53 +01001590 // For compound assignments we need another deoptimization point after the
1591 // variable/property load.
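  // For example, 'o.x += 1' first loads 'o.x', so the load gets its own
  // bailout point before the binary operation and the store.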
Andrei Popescu402d9372010-02-26 13:31:12 +00001592 if (expr->is_compound()) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001593 { AccumulatorValueContext context(this);
1594 switch (assign_type) {
1595 case VARIABLE:
1596 EmitVariableLoad(expr->target()->AsVariableProxy()->var());
Ben Murdoch8b112d22011-06-08 16:22:53 +01001597 PrepareForBailout(expr->target(), TOS_REG);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001598 break;
1599 case NAMED_PROPERTY:
1600 EmitNamedPropertyLoad(property);
Ben Murdoch8b112d22011-06-08 16:22:53 +01001601 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001602 break;
1603 case KEYED_PROPERTY:
1604 EmitKeyedPropertyLoad(property);
Ben Murdoch8b112d22011-06-08 16:22:53 +01001605 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001606 break;
1607 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001608 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001609
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001610 Token::Value op = expr->binary_op();
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001611 __ push(rax); // Left operand goes on the stack.
1612 VisitForAccumulatorValue(expr->value());
Andrei Popescu402d9372010-02-26 13:31:12 +00001613
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001614 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1615 ? OVERWRITE_RIGHT
1616 : NO_OVERWRITE;
1617 SetSourcePosition(expr->position() + 1);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001618 AccumulatorValueContext context(this);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001619 if (ShouldInlineSmiCase(op)) {
Ben Murdoch257744e2011-11-30 15:57:28 +00001620 EmitInlineSmiBinaryOp(expr->binary_operation(),
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001621 op,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001622 mode,
1623 expr->target(),
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001624 expr->value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001625 } else {
Ben Murdoch257744e2011-11-30 15:57:28 +00001626 EmitBinaryOp(expr->binary_operation(), op, mode);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001627 }
Ben Murdoch086aeea2011-05-13 15:57:08 +01001628 // Deoptimization point in case the binary operation may have side effects.
1629 PrepareForBailout(expr->binary_operation(), TOS_REG);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001630 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001631 VisitForAccumulatorValue(expr->value());
Andrei Popescu402d9372010-02-26 13:31:12 +00001632 }
1633
1634 // Record source position before possible IC call.
1635 SetSourcePosition(expr->position());
1636
1637 // Store the value.
1638 switch (assign_type) {
1639 case VARIABLE:
1640 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001641 expr->op());
Ben Murdoch086aeea2011-05-13 15:57:08 +01001642 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001643 context()->Plug(rax);
Andrei Popescu402d9372010-02-26 13:31:12 +00001644 break;
1645 case NAMED_PROPERTY:
1646 EmitNamedPropertyAssignment(expr);
1647 break;
1648 case KEYED_PROPERTY:
1649 EmitKeyedPropertyAssignment(expr);
1650 break;
1651 }
1652}
1653
1654
Leon Clarked91b9f72010-01-27 17:25:45 +00001655void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
Leon Clarkee46be812010-01-19 14:06:41 +00001656 SetSourcePosition(prop->position());
1657 Literal* key = prop->key()->AsLiteral();
1658 __ Move(rcx, key->handle());
Steve Block44f0eee2011-05-26 01:26:41 +01001659 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001660 EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
Leon Clarkee46be812010-01-19 14:06:41 +00001661}
1662
1663
Leon Clarked91b9f72010-01-27 17:25:45 +00001664void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
Leon Clarkee46be812010-01-19 14:06:41 +00001665 SetSourcePosition(prop->position());
Steve Block44f0eee2011-05-26 01:26:41 +01001666 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001667 EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
Leon Clarkee46be812010-01-19 14:06:41 +00001668}
1669
1670
Ben Murdoch257744e2011-11-30 15:57:28 +00001671void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001672 Token::Value op,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001673 OverwriteMode mode,
1674 Expression* left,
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001675 Expression* right) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001676 // Do combined smi check of the operands. Left operand is on the
1677 // stack (popped into rdx). Right operand is in rax but moved into
1678 // rcx to make the shifts easier.
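  // Or-ing the tagged values and testing the smi tag bit of the result
  // detects a non-smi in either operand: since the smi tag is 0, the or
  // has a clear tag bit only if both operands are smis.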
Ben Murdoch257744e2011-11-30 15:57:28 +00001679 Label done, stub_call, smi_case;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001680 __ pop(rdx);
1681 __ movq(rcx, rax);
Steve Block1e0659c2011-05-24 12:43:12 +01001682 __ or_(rax, rdx);
1683 JumpPatchSite patch_site(masm_);
Ben Murdoch257744e2011-11-30 15:57:28 +00001684 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001685
1686 __ bind(&stub_call);
Steve Block1e0659c2011-05-24 12:43:12 +01001687 __ movq(rax, rcx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001688 BinaryOpStub stub(op, mode);
1689 EmitCallIC(stub.GetCode(), &patch_site, expr->id());
1690 __ jmp(&done, Label::kNear);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001691
1692 __ bind(&smi_case);
1693 switch (op) {
1694 case Token::SAR:
1695 __ SmiShiftArithmeticRight(rax, rdx, rcx);
1696 break;
1697 case Token::SHL:
1698 __ SmiShiftLeft(rax, rdx, rcx);
1699 break;
1700 case Token::SHR:
1701 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1702 break;
1703 case Token::ADD:
1704 __ SmiAdd(rax, rdx, rcx, &stub_call);
1705 break;
1706 case Token::SUB:
1707 __ SmiSub(rax, rdx, rcx, &stub_call);
1708 break;
1709 case Token::MUL:
1710 __ SmiMul(rax, rdx, rcx, &stub_call);
1711 break;
1712 case Token::BIT_OR:
1713 __ SmiOr(rax, rdx, rcx);
1714 break;
1715 case Token::BIT_AND:
1716 __ SmiAnd(rax, rdx, rcx);
1717 break;
1718 case Token::BIT_XOR:
1719 __ SmiXor(rax, rdx, rcx);
1720 break;
1721 default:
1722 UNREACHABLE();
1723 break;
1724 }
1725
1726 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001727 context()->Plug(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001728}
1729
1730
Ben Murdoch257744e2011-11-30 15:57:28 +00001731void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1732 Token::Value op,
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001733 OverwriteMode mode) {
Steve Block1e0659c2011-05-24 12:43:12 +01001734 __ pop(rdx);
Ben Murdoch257744e2011-11-30 15:57:28 +00001735 BinaryOpStub stub(op, mode);
1736 // NULL signals no inlined smi code.
1737 EmitCallIC(stub.GetCode(), NULL, expr->id());
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001738 context()->Plug(rax);
Leon Clarkee46be812010-01-19 14:06:41 +00001739}
1740
1741
Ben Murdoch086aeea2011-05-13 15:57:08 +01001742void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
Leon Clarkef7060e22010-06-03 12:02:55 +01001743 // Invalid left-hand sides are rewritten to have a 'throw
1744 // ReferenceError' on the left-hand side.
1745 if (!expr->IsValidLeftHandSide()) {
1746 VisitForEffect(expr);
1747 return;
1748 }
1749
1750 // Left-hand side can only be a property, a global or a (parameter or local)
1751 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1752 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1753 LhsKind assign_type = VARIABLE;
1754 Property* prop = expr->AsProperty();
1755 if (prop != NULL) {
1756 assign_type = (prop->key()->IsPropertyName())
1757 ? NAMED_PROPERTY
1758 : KEYED_PROPERTY;
1759 }
1760
1761 switch (assign_type) {
1762 case VARIABLE: {
1763 Variable* var = expr->AsVariableProxy()->var();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001764 EffectContext context(this);
1765 EmitVariableAssignment(var, Token::ASSIGN);
Leon Clarkef7060e22010-06-03 12:02:55 +01001766 break;
1767 }
1768 case NAMED_PROPERTY: {
1769 __ push(rax); // Preserve value.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001770 VisitForAccumulatorValue(prop->obj());
Leon Clarkef7060e22010-06-03 12:02:55 +01001771 __ movq(rdx, rax);
1772 __ pop(rax); // Restore value.
1773 __ Move(rcx, prop->key()->AsLiteral()->handle());
Steve Block44f0eee2011-05-26 01:26:41 +01001774 Handle<Code> ic = is_strict_mode()
1775 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1776 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001777 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
Leon Clarkef7060e22010-06-03 12:02:55 +01001778 break;
1779 }
1780 case KEYED_PROPERTY: {
1781 __ push(rax); // Preserve value.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001782 if (prop->is_synthetic()) {
1783 ASSERT(prop->obj()->AsVariableProxy() != NULL);
1784 ASSERT(prop->key()->AsLiteral() != NULL);
1785 { AccumulatorValueContext for_object(this);
1786 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1787 }
1788 __ movq(rdx, rax);
1789 __ Move(rcx, prop->key()->AsLiteral()->handle());
1790 } else {
1791 VisitForStackValue(prop->obj());
1792 VisitForAccumulatorValue(prop->key());
1793 __ movq(rcx, rax);
1794 __ pop(rdx);
1795 }
1796 __ pop(rax); // Restore value.
Steve Block44f0eee2011-05-26 01:26:41 +01001797 Handle<Code> ic = is_strict_mode()
1798 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1799 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001800 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
Leon Clarkef7060e22010-06-03 12:02:55 +01001801 break;
1802 }
1803 }
Ben Murdoch086aeea2011-05-13 15:57:08 +01001804 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001805 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01001806}
1807
1808
Leon Clarked91b9f72010-01-27 17:25:45 +00001809void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001810 Token::Value op) {
Leon Clarkef7060e22010-06-03 12:02:55 +01001811 // Left-hand sides that rewrite to explicit property accesses do not reach
1812 // here.
Steve Block3ce2e202009-11-05 08:53:23 +00001813 ASSERT(var != NULL);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001814 ASSERT(var->is_global() || var->AsSlot() != NULL);
Leon Clarkef7060e22010-06-03 12:02:55 +01001815
Steve Block3ce2e202009-11-05 08:53:23 +00001816 if (var->is_global()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00001817 ASSERT(!var->is_this());
Steve Blockd0582a62009-12-15 09:54:21 +00001818 // Assignment to a global variable. Use inline caching for the
1819 // assignment. Right-hand-side value is passed in rax, variable name in
Leon Clarkef7060e22010-06-03 12:02:55 +01001820 // rcx, and the global object in rdx.
Steve Block3ce2e202009-11-05 08:53:23 +00001821 __ Move(rcx, var->name());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001822 __ movq(rdx, GlobalObjectOperand());
Steve Block44f0eee2011-05-26 01:26:41 +01001823 Handle<Code> ic = is_strict_mode()
1824 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1825 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001826 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
Steve Blockd0582a62009-12-15 09:54:21 +00001827
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001828 } else if (op == Token::INIT_CONST) {
1829 // Like var declarations, const declarations are hoisted to function
1830 // scope. However, unlike var initializers, const initializers are able
1831 // to drill a hole to that function context, even from inside a 'with'
1832 // context. We thus bypass the normal static scope lookup.
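  // For example, in
  //   function f(o) { with (o) { const c = 1; } }
  // the initializer stores directly into f's context slot for 'c',
  // bypassing the 'with' object.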
1833 Slot* slot = var->AsSlot();
1834 Label skip;
1835 switch (slot->type()) {
1836 case Slot::PARAMETER:
1837 // No const parameters.
1838 UNREACHABLE();
1839 break;
1840 case Slot::LOCAL:
1841 __ movq(rdx, Operand(rbp, SlotOffset(slot)));
1842 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1843 __ j(not_equal, &skip);
1844 __ movq(Operand(rbp, SlotOffset(slot)), rax);
1845 break;
1846 case Slot::CONTEXT: {
1847 __ movq(rcx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
1848 __ movq(rdx, ContextOperand(rcx, slot->index()));
1849 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1850 __ j(not_equal, &skip);
1851 __ movq(ContextOperand(rcx, slot->index()), rax);
1852 int offset = Context::SlotOffset(slot->index());
1853 __ movq(rdx, rax); // Preserve the stored value in rax.
1854 __ RecordWrite(rcx, offset, rdx, rbx);
1855 break;
1856 }
1857 case Slot::LOOKUP:
1858 __ push(rax);
1859 __ push(rsi);
1860 __ Push(var->name());
1861 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1862 break;
1863 }
1864 __ bind(&skip);
1865
1866 } else if (var->mode() != Variable::CONST) {
1867 // Perform the assignment for non-const variables. Const assignments
1868 // are simply skipped.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001869 Slot* slot = var->AsSlot();
Steve Blockd0582a62009-12-15 09:54:21 +00001870 switch (slot->type()) {
Leon Clarkee46be812010-01-19 14:06:41 +00001871 case Slot::PARAMETER:
Leon Clarkef7060e22010-06-03 12:02:55 +01001872 case Slot::LOCAL:
Leon Clarkef7060e22010-06-03 12:02:55 +01001873 // Perform the assignment.
1874 __ movq(Operand(rbp, SlotOffset(slot)), rax);
Steve Blockd0582a62009-12-15 09:54:21 +00001875 break;
Steve Blockd0582a62009-12-15 09:54:21 +00001876
1877 case Slot::CONTEXT: {
Leon Clarkee46be812010-01-19 14:06:41 +00001878 MemOperand target = EmitSlotSearch(slot, rcx);
Leon Clarkef7060e22010-06-03 12:02:55 +01001879 // Perform the assignment and issue the write barrier.
1880 __ movq(target, rax);
1881 // The value of the assignment is in rax. RecordWrite clobbers its
1882 // register arguments.
1883 __ movq(rdx, rax);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001884 int offset = Context::SlotOffset(slot->index());
Leon Clarkee46be812010-01-19 14:06:41 +00001885 __ RecordWrite(rcx, offset, rdx, rbx);
Steve Blockd0582a62009-12-15 09:54:21 +00001886 break;
Steve Block3ce2e202009-11-05 08:53:23 +00001887 }
Steve Blockd0582a62009-12-15 09:54:21 +00001888
1889 case Slot::LOOKUP:
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001890 // Call the runtime for the assignment.
Leon Clarkef7060e22010-06-03 12:02:55 +01001891 __ push(rax); // Value.
1892 __ push(rsi); // Context.
1893 __ Push(var->name());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001894 __ Push(Smi::FromInt(strict_mode_flag()));
1895 __ CallRuntime(Runtime::kStoreContextSlot, 4);
Steve Blockd0582a62009-12-15 09:54:21 +00001896 break;
Steve Block3ce2e202009-11-05 08:53:23 +00001897 }
1898 }
1899}
1900
1901
Leon Clarked91b9f72010-01-27 17:25:45 +00001902void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00001903 // Assignment to a property, using a named store IC.
1904 Property* prop = expr->target()->AsProperty();
1905 ASSERT(prop != NULL);
1906 ASSERT(prop->key()->AsLiteral() != NULL);
Steve Block3ce2e202009-11-05 08:53:23 +00001907
Steve Blockd0582a62009-12-15 09:54:21 +00001908 // If the assignment starts a block of assignments to the same object,
1909 // change to slow case to avoid the quadratic behavior of repeatedly
1910 // adding fast properties.
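  // A typical initialization block is a run of assignments like
  //   this.a = ...; this.b = ...;
  // in a constructor: the receiver is switched to slow properties for
  // the duration of the block and back to fast properties at the end.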
1911 if (expr->starts_initialization_block()) {
Leon Clarkee46be812010-01-19 14:06:41 +00001912 __ push(result_register());
1913 __ push(Operand(rsp, kPointerSize)); // Receiver is now under value.
Steve Blockd0582a62009-12-15 09:54:21 +00001914 __ CallRuntime(Runtime::kToSlowProperties, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00001915 __ pop(result_register());
Steve Blockd0582a62009-12-15 09:54:21 +00001916 }
1917
Leon Clarkee46be812010-01-19 14:06:41 +00001918 // Record source code position before IC call.
1919 SetSourcePosition(expr->position());
Steve Blockd0582a62009-12-15 09:54:21 +00001920 __ Move(rcx, prop->key()->AsLiteral()->handle());
Leon Clarke4515c472010-02-03 11:58:03 +00001921 if (expr->ends_initialization_block()) {
1922 __ movq(rdx, Operand(rsp, 0));
1923 } else {
1924 __ pop(rdx);
1925 }
Steve Block44f0eee2011-05-26 01:26:41 +01001926 Handle<Code> ic = is_strict_mode()
1927 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1928 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001929 EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
Steve Blockd0582a62009-12-15 09:54:21 +00001930
1931 // If the assignment ends an initialization block, revert to fast case.
1932 if (expr->ends_initialization_block()) {
1933 __ push(rax); // Result of assignment, saved even if not needed.
1934 __ push(Operand(rsp, kPointerSize)); // Receiver is under value.
1935 __ CallRuntime(Runtime::kToFastProperties, 1);
1936 __ pop(rax);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001937 __ Drop(1);
Steve Blockd0582a62009-12-15 09:54:21 +00001938 }
Ben Murdoch086aeea2011-05-13 15:57:08 +01001939 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001940 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00001941}
1942
1943
Leon Clarked91b9f72010-01-27 17:25:45 +00001944void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00001945 // Assignment to a property, using a keyed store IC.
1946
1947 // If the assignment starts a block of assignments to the same object,
1948 // change to slow case to avoid the quadratic behavior of repeatedly
1949 // adding fast properties.
1950 if (expr->starts_initialization_block()) {
Leon Clarkee46be812010-01-19 14:06:41 +00001951 __ push(result_register());
1952 // Receiver is now under the key and value.
Steve Blockd0582a62009-12-15 09:54:21 +00001953 __ push(Operand(rsp, 2 * kPointerSize));
1954 __ CallRuntime(Runtime::kToSlowProperties, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00001955 __ pop(result_register());
Steve Blockd0582a62009-12-15 09:54:21 +00001956 }
1957
Leon Clarkef7060e22010-06-03 12:02:55 +01001958 __ pop(rcx);
1959 if (expr->ends_initialization_block()) {
1960 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later.
1961 } else {
1962 __ pop(rdx);
1963 }
Leon Clarkee46be812010-01-19 14:06:41 +00001964 // Record source code position before IC call.
1965 SetSourcePosition(expr->position());
Steve Block44f0eee2011-05-26 01:26:41 +01001966 Handle<Code> ic = is_strict_mode()
1967 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1968 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00001969 EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
Steve Blockd0582a62009-12-15 09:54:21 +00001970
1971 // If the assignment ends an initialization block, revert to fast case.
1972 if (expr->ends_initialization_block()) {
Leon Clarkef7060e22010-06-03 12:02:55 +01001973 __ pop(rdx);
Steve Blockd0582a62009-12-15 09:54:21 +00001974 __ push(rax); // Result of assignment, saved even if not needed.
Leon Clarkef7060e22010-06-03 12:02:55 +01001975 __ push(rdx);
Steve Blockd0582a62009-12-15 09:54:21 +00001976 __ CallRuntime(Runtime::kToFastProperties, 1);
1977 __ pop(rax);
1978 }
1979
Ben Murdoch086aeea2011-05-13 15:57:08 +01001980 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001981 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00001982}
1983
1984
Leon Clarked91b9f72010-01-27 17:25:45 +00001985void FullCodeGenerator::VisitProperty(Property* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00001986 Comment cmnt(masm_, "[ Property");
1987 Expression* key = expr->key();
Steve Blockd0582a62009-12-15 09:54:21 +00001988
Leon Clarkee46be812010-01-19 14:06:41 +00001989 if (key->IsPropertyName()) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001990 VisitForAccumulatorValue(expr->obj());
Leon Clarkee46be812010-01-19 14:06:41 +00001991 EmitNamedPropertyLoad(expr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001992 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00001993 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001994 VisitForStackValue(expr->obj());
1995 VisitForAccumulatorValue(expr->key());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001996 __ pop(rdx);
Leon Clarkee46be812010-01-19 14:06:41 +00001997 EmitKeyedPropertyLoad(expr);
Ben Murdochb0fe1622011-05-05 13:52:32 +01001998 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00001999 }
Steve Blockd0582a62009-12-15 09:54:21 +00002000}
2001
2002
Leon Clarked91b9f72010-01-27 17:25:45 +00002003void FullCodeGenerator::EmitCallWithIC(Call* expr,
Andrei Popescu402d9372010-02-26 13:31:12 +00002004 Handle<Object> name,
Leon Clarkee46be812010-01-19 14:06:41 +00002005 RelocInfo::Mode mode) {
Steve Blockd0582a62009-12-15 09:54:21 +00002006 // Code common for calls using the IC.
2007 ZoneList<Expression*>* args = expr->arguments();
Steve Block3ce2e202009-11-05 08:53:23 +00002008 int arg_count = args->length();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002009 { PreservePositionScope scope(masm()->positions_recorder());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002010 for (int i = 0; i < arg_count; i++) {
2011 VisitForStackValue(args->at(i));
2012 }
2013 __ Move(rcx, name);
Steve Block3ce2e202009-11-05 08:53:23 +00002014 }
Steve Blockd0582a62009-12-15 09:54:21 +00002015 // Record source position for debugger.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002016 SetSourcePosition(expr->position());
Steve Block3ce2e202009-11-05 08:53:23 +00002017 // Call the IC initialization code.
Leon Clarkee46be812010-01-19 14:06:41 +00002018 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Steve Block44f0eee2011-05-26 01:26:41 +01002019 Handle<Code> ic =
Ben Murdoch257744e2011-11-30 15:57:28 +00002020 ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
2021 EmitCallIC(ic, mode, expr->id());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002022 RecordJSReturnSite(expr);
Steve Block3ce2e202009-11-05 08:53:23 +00002023 // Restore context register.
2024 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002025 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00002026}
2027
2028
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002029void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Ben Murdoch257744e2011-11-30 15:57:28 +00002030 Expression* key) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002031 // Load the key.
2032 VisitForAccumulatorValue(key);
2033
2034 // Swap the name of the function and the receiver on the stack to follow
2035 // the calling convention for call ICs.
2036 __ pop(rcx);
2037 __ push(rax);
2038 __ push(rcx);
2039
2040 // Load the arguments.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002041 ZoneList<Expression*>* args = expr->arguments();
2042 int arg_count = args->length();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002043 { PreservePositionScope scope(masm()->positions_recorder());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002044 for (int i = 0; i < arg_count; i++) {
2045 VisitForStackValue(args->at(i));
2046 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002047 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002048 // Record source position for debugger.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002049 SetSourcePosition(expr->position());
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002050 // Call the IC initialization code.
2051 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Steve Block44f0eee2011-05-26 01:26:41 +01002052 Handle<Code> ic =
2053 ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002054 __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
Ben Murdoch257744e2011-11-30 15:57:28 +00002055 EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002056 RecordJSReturnSite(expr);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002057 // Restore context register.
2058 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002059 context()->DropAndPlug(1, rax); // Drop the key still on the stack.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002060}
2061
2062
Ben Murdoch257744e2011-11-30 15:57:28 +00002063void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
Steve Blockd0582a62009-12-15 09:54:21 +00002064 // Code common for calls using the call stub.
2065 ZoneList<Expression*>* args = expr->arguments();
2066 int arg_count = args->length();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002067 { PreservePositionScope scope(masm()->positions_recorder());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002068 for (int i = 0; i < arg_count; i++) {
2069 VisitForStackValue(args->at(i));
2070 }
Steve Block3ce2e202009-11-05 08:53:23 +00002071 }
Steve Blockd0582a62009-12-15 09:54:21 +00002072 // Record source position for debugger.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002073 SetSourcePosition(expr->position());
Leon Clarkef7060e22010-06-03 12:02:55 +01002074 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Ben Murdoch257744e2011-11-30 15:57:28 +00002075 CallFunctionStub stub(arg_count, in_loop, flags);
Steve Blockd0582a62009-12-15 09:54:21 +00002076 __ CallStub(&stub);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002077 RecordJSReturnSite(expr);
Steve Blockd0582a62009-12-15 09:54:21 +00002078 // Restore context register.
2079 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2080 // Discard the function left on TOS.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002081 context()->DropAndPlug(1, rax);
Steve Blockd0582a62009-12-15 09:54:21 +00002082}
2083
2084
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002085void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2086 int arg_count) {
2087 // Push copy of the first argument or undefined if it doesn't exist.
2088 if (arg_count > 0) {
2089 __ push(Operand(rsp, arg_count * kPointerSize));
2090 } else {
2091 __ PushRoot(Heap::kUndefinedValueRootIndex);
2092 }
2093
2094 // Push the receiver of the enclosing function and do the runtime call.
2095 __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
2096
2097 // Push the strict mode flag.
2098 __ Push(Smi::FromInt(strict_mode_flag()));
2099
2100 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2101 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2102 : Runtime::kResolvePossiblyDirectEval, 4);
2103}
2104
2105
Leon Clarked91b9f72010-01-27 17:25:45 +00002106void FullCodeGenerator::VisitCall(Call* expr) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01002107#ifdef DEBUG
2108 // We want to verify that RecordJSReturnSite gets called on all paths
2109 // through this function. Avoid early returns.
2110 expr->return_is_recorded_ = false;
2111#endif
2112
Steve Blockd0582a62009-12-15 09:54:21 +00002113 Comment cmnt(masm_, "[ Call");
2114 Expression* fun = expr->expression();
2115 Variable* var = fun->AsVariableProxy()->AsVariable();
2116
2117 if (var != NULL && var->is_possibly_eval()) {
Leon Clarkef7060e22010-06-03 12:02:55 +01002118 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2119 // resolve the function we need to call and the receiver of the
Ben Murdoch086aeea2011-05-13 15:57:08 +01002120 // call. Then we call the resolved function using the given
Leon Clarkef7060e22010-06-03 12:02:55 +01002121 // arguments.
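    // For example, a call written as eval(src) may resolve to the global
    // eval or to a local binding named 'eval', so the callee and its
    // receiver have to be determined at runtime.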
Leon Clarkef7060e22010-06-03 12:02:55 +01002122 ZoneList<Expression*>* args = expr->arguments();
2123 int arg_count = args->length();
Ben Murdochb0fe1622011-05-05 13:52:32 +01002124 { PreservePositionScope pos_scope(masm()->positions_recorder());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002125 VisitForStackValue(fun);
2126 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
Leon Clarkef7060e22010-06-03 12:02:55 +01002127
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002128 // Push the arguments.
2129 for (int i = 0; i < arg_count; i++) {
2130 VisitForStackValue(args->at(i));
2131 }
Leon Clarkef7060e22010-06-03 12:02:55 +01002132
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002133 // If we know that eval can only be shadowed by eval-introduced
2134 // variables we attempt to load the global eval function directly
2135 // in generated code. If we succeed, there is no need to perform a
2136 // context lookup in the runtime system.
2137 Label done;
2138 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
2139 Label slow;
2140 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
2141 NOT_INSIDE_TYPEOF,
2142 &slow);
2143 // Push the function and resolve eval.
2144 __ push(rax);
2145 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2146 __ jmp(&done);
2147 __ bind(&slow);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002148 }
2149
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002150 // Push copy of the function (found below the arguments) and
2151 // resolve eval.
2152 __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2153 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2154 if (done.is_linked()) {
2155 __ bind(&done);
2156 }
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002157
2158 // The runtime call returns a pair of values in rax (function) and
2159 // rdx (receiver). Touch up the stack with the right values.
2160 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2161 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002162 }
Leon Clarkef7060e22010-06-03 12:02:55 +01002163 // Record source position for debugger.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002164 SetSourcePosition(expr->position());
Leon Clarkef7060e22010-06-03 12:02:55 +01002165 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Ben Murdoch257744e2011-11-30 15:57:28 +00002166 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
Leon Clarkef7060e22010-06-03 12:02:55 +01002167 __ CallStub(&stub);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002168 RecordJSReturnSite(expr);
Leon Clarkef7060e22010-06-03 12:02:55 +01002169 // Restore context register.
2170 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002171 context()->DropAndPlug(1, rax);
Steve Blockd0582a62009-12-15 09:54:21 +00002172 } else if (var != NULL && !var->is_this() && var->is_global()) {
2173 // Call to a global variable.
Steve Blockd0582a62009-12-15 09:54:21 +00002174 // Push global object as receiver for the call IC lookup.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002175 __ push(GlobalObjectOperand());
Leon Clarkee46be812010-01-19 14:06:41 +00002176 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002177 } else if (var != NULL && var->AsSlot() != NULL &&
2178 var->AsSlot()->type() == Slot::LOOKUP) {
Steve Block59151502010-09-22 15:07:15 +01002179 // Call to a lookup slot (dynamically introduced variable).
2180 Label slow, done;
2181
Ben Murdochb0fe1622011-05-05 13:52:32 +01002182 { PreservePositionScope scope(masm()->positions_recorder());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002183 // Generate code for loading from variables potentially shadowed
2184 // by eval-introduced variables.
2185 EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
2186 NOT_INSIDE_TYPEOF,
2187 &slow,
2188 &done);
Steve Block59151502010-09-22 15:07:15 +01002189
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002190 __ bind(&slow);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002191 }
2192 // Call the runtime to find the function to call (returned in rax)
2193 // and the object holding it (returned in rdx).
2194 __ push(context_register());
2195 __ Push(var->name());
2196 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2197 __ push(rax); // Function.
2198 __ push(rdx); // Receiver.
Steve Block59151502010-09-22 15:07:15 +01002199
Ben Murdoch086aeea2011-05-13 15:57:08 +01002200 // If fast case code has been generated, emit code to push the
2201 // function and receiver and have the slow path jump around this
2202 // code.
2203 if (done.is_linked()) {
Ben Murdoch257744e2011-11-30 15:57:28 +00002204 Label call;
2205 __ jmp(&call, Label::kNear);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002206 __ bind(&done);
2207 // Push function.
2208 __ push(rax);
2209 // Push global receiver.
Ben Murdoch257744e2011-11-30 15:57:28 +00002210 __ movq(rbx, GlobalObjectOperand());
2211 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2212 __ bind(&call);
Steve Block59151502010-09-22 15:07:15 +01002213 }
2214
Ben Murdoch257744e2011-11-30 15:57:28 +00002215 // The receiver is either the global receiver or an object found
2216 // by LoadContextSlot. That object could be the hole if the
2217 // receiver is implicitly the global object.
2218 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
Steve Blockd0582a62009-12-15 09:54:21 +00002219 } else if (fun->AsProperty() != NULL) {
2220 // Call to an object property.
2221 Property* prop = fun->AsProperty();
2222 Literal* key = prop->key()->AsLiteral();
2223 if (key != NULL && key->handle()->IsSymbol()) {
2224 // Call to a named property, use call IC.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002225 { PreservePositionScope scope(masm()->positions_recorder());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002226 VisitForStackValue(prop->obj());
2227 }
Leon Clarkee46be812010-01-19 14:06:41 +00002228 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
Steve Blockd0582a62009-12-15 09:54:21 +00002229 } else {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002230 // Call to a keyed property.
2231 // For a synthetic property use keyed load IC followed by function call,
Ben Murdoch086aeea2011-05-13 15:57:08 +01002232 // for a regular property use EmitKeyedCallWithIC.
Steve Blockd0582a62009-12-15 09:54:21 +00002233 if (prop->is_synthetic()) {
Steve Block1e0659c2011-05-24 12:43:12 +01002234 // Do not visit the object and key subexpressions (they are shared
2235 // by all occurrences of the same rewritten parameter).
2236 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2237 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2238 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2239 MemOperand operand = EmitSlotSearch(slot, rdx);
2240 __ movq(rdx, operand);
2241
2242 ASSERT(prop->key()->AsLiteral() != NULL);
2243 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2244 __ Move(rax, prop->key()->AsLiteral()->handle());
2245
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002246 // Record source code position for IC call.
Ben Murdochb0fe1622011-05-05 13:52:32 +01002247 SetSourcePosition(prop->position());
Ben Murdoch086aeea2011-05-13 15:57:08 +01002248
Steve Block44f0eee2011-05-26 01:26:41 +01002249 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00002250 EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002251 // Push result (function).
2252 __ push(rax);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002253 // Push Global receiver.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002254 __ movq(rcx, GlobalObjectOperand());
Leon Clarkee46be812010-01-19 14:06:41 +00002255 __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00002256 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
Steve Blockd0582a62009-12-15 09:54:21 +00002257 } else {
Steve Block1e0659c2011-05-24 12:43:12 +01002258 { PreservePositionScope scope(masm()->positions_recorder());
2259 VisitForStackValue(prop->obj());
2260 }
Ben Murdoch257744e2011-11-30 15:57:28 +00002261 EmitKeyedCallWithIC(expr, prop->key());
Steve Blockd0582a62009-12-15 09:54:21 +00002262 }
Steve Blockd0582a62009-12-15 09:54:21 +00002263 }
2264 } else {
Ben Murdochb0fe1622011-05-05 13:52:32 +01002265 { PreservePositionScope scope(masm()->positions_recorder());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08002266 VisitForStackValue(fun);
2267 }
Steve Blockd0582a62009-12-15 09:54:21 +00002268 // Load global receiver object.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08002269 __ movq(rbx, GlobalObjectOperand());
Steve Blockd0582a62009-12-15 09:54:21 +00002270 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2271 // Emit function call.
Ben Murdoch257744e2011-11-30 15:57:28 +00002272 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
Steve Blockd0582a62009-12-15 09:54:21 +00002273 }
Ben Murdoch086aeea2011-05-13 15:57:08 +01002274
2275#ifdef DEBUG
2276 // RecordJSReturnSite should have been called.
2277 ASSERT(expr->return_is_recorded_);
2278#endif
Steve Blockd0582a62009-12-15 09:54:21 +00002279}
2280
2281
Leon Clarked91b9f72010-01-27 17:25:45 +00002282void FullCodeGenerator::VisitCallNew(CallNew* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00002283 Comment cmnt(masm_, "[ CallNew");
2284 // According to ECMA-262, section 11.2.2, page 44, the function
2285 // expression in new calls must be evaluated before the
2286 // arguments.
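  // For example (illustrative only): in `new f(g(), h())` the expression f
  // is evaluated first, then g() and h() left-to-right, and only then is
  // the construct builtin invoked.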
Steve Blockd0582a62009-12-15 09:54:21 +00002287
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002288 // Push constructor on the stack. If it's not a function it's used as
2289 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2290 // ignored.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002291 VisitForStackValue(expr->expression());
Steve Blockd0582a62009-12-15 09:54:21 +00002292
2293 // Push the arguments ("left-to-right") on the stack.
2294 ZoneList<Expression*>* args = expr->arguments();
2295 int arg_count = args->length();
2296 for (int i = 0; i < arg_count; i++) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002297 VisitForStackValue(args->at(i));
Steve Blockd0582a62009-12-15 09:54:21 +00002298 }
2299
2300 // Call the construct call builtin that handles allocation and
2301 // constructor invocation.
2302 SetSourcePosition(expr->position());
2303
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002304 // Load function and argument count into rdi and rax.
Steve Blockd0582a62009-12-15 09:54:21 +00002305 __ Set(rax, arg_count);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002306 __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
Steve Blockd0582a62009-12-15 09:54:21 +00002307
Steve Block44f0eee2011-05-26 01:26:41 +01002308 Handle<Code> construct_builtin =
2309 isolate()->builtins()->JSConstructCall();
Steve Blockd0582a62009-12-15 09:54:21 +00002310 __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002311 context()->Plug(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00002312}
2313
2314
Leon Clarkef7060e22010-06-03 12:02:55 +01002315void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
2316 ASSERT(args->length() == 1);
2317
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002318 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002319
2320 Label materialize_true, materialize_false;
2321 Label* if_true = NULL;
2322 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002323 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002324 context()->PrepareTest(&materialize_true, &materialize_false,
2325 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002326
Ben Murdoch086aeea2011-05-13 15:57:08 +01002327 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002328 __ JumpIfSmi(rax, if_true);
2329 __ jmp(if_false);
2330
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002331 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002332}
2333
2334
2335void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2336 ASSERT(args->length() == 1);
2337
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002338 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002339
2340 Label materialize_true, materialize_false;
2341 Label* if_true = NULL;
2342 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002343 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002344 context()->PrepareTest(&materialize_true, &materialize_false,
2345 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002346
Ben Murdoch086aeea2011-05-13 15:57:08 +01002347 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Ben Murdochf87a2032010-10-22 12:50:53 +01002348 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2349 Split(non_negative_smi, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002350
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002351 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002352}
2353
2354
2355void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2356 ASSERT(args->length() == 1);
2357
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002358 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002359
2360 Label materialize_true, materialize_false;
2361 Label* if_true = NULL;
2362 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002363 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002364 context()->PrepareTest(&materialize_true, &materialize_false,
2365 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002366
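  // Rough semantics of the checks below (illustrative, not emitted code):
  //   %_IsObject({})             -> true
  //   %_IsObject(null)           -> true
  //   %_IsObject("str")          -> false  (below FIRST_JS_OBJECT_TYPE)
  //   %_IsObject(function() {})  -> false  (above LAST_JS_OBJECT_TYPE)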
2367 __ JumpIfSmi(rax, if_false);
2368 __ CompareRoot(rax, Heap::kNullValueRootIndex);
2369 __ j(equal, if_true);
2370 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2371 // Undetectable objects behave like undefined when tested with typeof.
2372 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2373 Immediate(1 << Map::kIsUndetectable));
2374 __ j(not_zero, if_false);
2375 __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2376 __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
2377 __ j(below, if_false);
2378 __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002379 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002380 Split(below_equal, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002381
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002382 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002383}
2384
2385
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002386void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2387 ASSERT(args->length() == 1);
2388
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002389 VisitForAccumulatorValue(args->at(0));
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002390
2391 Label materialize_true, materialize_false;
2392 Label* if_true = NULL;
2393 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002394 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002395 context()->PrepareTest(&materialize_true, &materialize_false,
2396 &if_true, &if_false, &fall_through);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002397
2398 __ JumpIfSmi(rax, if_false);
2399 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002400 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002401 Split(above_equal, if_true, if_false, fall_through);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002402
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002403 context()->Plug(if_true, if_false);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002404}
2405
2406
Leon Clarkef7060e22010-06-03 12:02:55 +01002407void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2408 ASSERT(args->length() == 1);
2409
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002410 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002411
2412 Label materialize_true, materialize_false;
2413 Label* if_true = NULL;
2414 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002415 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002416 context()->PrepareTest(&materialize_true, &materialize_false,
2417 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002418
2419 __ JumpIfSmi(rax, if_false);
2420 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2421 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2422 Immediate(1 << Map::kIsUndetectable));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002423 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002424 Split(not_zero, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002425
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002426 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002427}
2428
2429
Iain Merrick75681382010-08-19 15:07:18 +01002430void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2431 ZoneList<Expression*>* args) {
2432 ASSERT(args->length() == 1);
2433
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002434 VisitForAccumulatorValue(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01002435
2436 Label materialize_true, materialize_false;
2437 Label* if_true = NULL;
2438 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002439 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002440 context()->PrepareTest(&materialize_true, &materialize_false,
2441 &if_true, &if_false, &fall_through);
Iain Merrick75681382010-08-19 15:07:18 +01002442
Ben Murdoch8b112d22011-06-08 16:22:53 +01002443 if (FLAG_debug_code) __ AbortIfSmi(rax);
2444
2445 // Check whether this map has already been checked to be safe for default
2446 // valueOf.
2447 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2448 __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2449 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2450 __ j(not_zero, if_true);
2451
2452 // Check for fast case object. Generate false result for slow case object.
2453 __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2454 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2455 __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2456 __ j(equal, if_false);
2457
2458 // Look for valueOf symbol in the descriptor array, and indicate false if
2459 // found. The type is not checked, so if it is a transition it is a false
2460 // negative.
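  // Rough intent of the scan below (illustrative): after, say,
  //   var s = new String("x"); s.valueOf = function() { return 1; };
  // the wrapper's map mentions 'valueOf' and this fast path reports false,
  // possibly conservatively (see the note on transitions above).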
Ben Murdoch257744e2011-11-30 15:57:28 +00002461 __ LoadInstanceDescriptors(rbx, rbx);
Ben Murdoch8b112d22011-06-08 16:22:53 +01002462 __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
2463 // rbx: descriptor array
2464 // rcx: length of descriptor array
2465 // Calculate the end of the descriptor array.
2466 SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
2467 __ lea(rcx,
2468 Operand(
2469 rbx, index.reg, index.scale, FixedArray::kHeaderSize));
2470 // Calculate location of the first key name.
2471 __ addq(rbx,
2472 Immediate(FixedArray::kHeaderSize +
2473 DescriptorArray::kFirstIndex * kPointerSize));
 2474 // Loop through all the keys in the descriptor array. If one of these is
 2475 // the symbol valueOf, the result is false.
2476 Label entry, loop;
2477 __ jmp(&entry);
2478 __ bind(&loop);
2479 __ movq(rdx, FieldOperand(rbx, 0));
2480 __ Cmp(rdx, FACTORY->value_of_symbol());
2481 __ j(equal, if_false);
2482 __ addq(rbx, Immediate(kPointerSize));
2483 __ bind(&entry);
2484 __ cmpq(rbx, rcx);
2485 __ j(not_equal, &loop);
2486
2487 // Reload map as register rbx was used as temporary above.
2488 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2489
 2490 // If a valueOf property is not found on the object, check that its
 2491 // prototype is the unmodified String prototype. If not, the result is false.
2492 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2493 __ testq(rcx, Immediate(kSmiTagMask));
2494 __ j(zero, if_false);
2495 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2496 __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2497 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2498 __ cmpq(rcx,
2499 ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2500 __ j(not_equal, if_false);
2501 // Set the bit in the map to indicate that it has been checked safe for
 2502 // default valueOf, and produce the true result.
2503 __ or_(FieldOperand(rbx, Map::kBitField2Offset),
2504 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2505 __ jmp(if_true);
2506
Ben Murdoch086aeea2011-05-13 15:57:08 +01002507 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002508 context()->Plug(if_true, if_false);
Iain Merrick75681382010-08-19 15:07:18 +01002509}
2510
2511
Leon Clarkef7060e22010-06-03 12:02:55 +01002512void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2513 ASSERT(args->length() == 1);
2514
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002515 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002516
2517 Label materialize_true, materialize_false;
2518 Label* if_true = NULL;
2519 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002520 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002521 context()->PrepareTest(&materialize_true, &materialize_false,
2522 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002523
2524 __ JumpIfSmi(rax, if_false);
2525 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002526 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002527 Split(equal, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002528
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002529 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002530}
2531
2532
2533void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2534 ASSERT(args->length() == 1);
2535
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002536 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002537
2538 Label materialize_true, materialize_false;
2539 Label* if_true = NULL;
2540 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002541 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002542 context()->PrepareTest(&materialize_true, &materialize_false,
2543 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002544
2545 __ JumpIfSmi(rax, if_false);
2546 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002547 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002548 Split(equal, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002549
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002550 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002551}
2552
2553
2554void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2555 ASSERT(args->length() == 1);
2556
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002557 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002558
2559 Label materialize_true, materialize_false;
2560 Label* if_true = NULL;
2561 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002562 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002563 context()->PrepareTest(&materialize_true, &materialize_false,
2564 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002565
2566 __ JumpIfSmi(rax, if_false);
2567 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002568 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002569 Split(equal, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002570
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002571 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002572}
2573
2574
2575
2576void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2577 ASSERT(args->length() == 0);
2578
2579 Label materialize_true, materialize_false;
2580 Label* if_true = NULL;
2581 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002582 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002583 context()->PrepareTest(&materialize_true, &materialize_false,
2584 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002585
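  // Illustrative semantics (not emitted code): %_IsConstructCall() is true
  // inside `new F()` and false for a plain call `F()`; the frame walk below
  // distinguishes the two via the CONSTRUCT frame marker.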
2586 // Get the frame pointer for the calling frame.
2587 __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2588
2589 // Skip the arguments adaptor frame if it exists.
2590 Label check_frame_marker;
Steve Block44f0eee2011-05-26 01:26:41 +01002591 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
2592 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Leon Clarkef7060e22010-06-03 12:02:55 +01002593 __ j(not_equal, &check_frame_marker);
2594 __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2595
2596 // Check the marker in the calling frame.
2597 __ bind(&check_frame_marker);
Steve Block44f0eee2011-05-26 01:26:41 +01002598 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
2599 Smi::FromInt(StackFrame::CONSTRUCT));
Ben Murdoch086aeea2011-05-13 15:57:08 +01002600 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002601 Split(equal, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002602
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002603 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002604}
2605
2606
2607void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2608 ASSERT(args->length() == 2);
2609
2610 // Load the two objects into registers and perform the comparison.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002611 VisitForStackValue(args->at(0));
2612 VisitForAccumulatorValue(args->at(1));
Leon Clarkef7060e22010-06-03 12:02:55 +01002613
2614 Label materialize_true, materialize_false;
2615 Label* if_true = NULL;
2616 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002617 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002618 context()->PrepareTest(&materialize_true, &materialize_false,
2619 &if_true, &if_false, &fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002620
2621 __ pop(rbx);
2622 __ cmpq(rax, rbx);
Ben Murdoch086aeea2011-05-13 15:57:08 +01002623 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002624 Split(equal, if_true, if_false, fall_through);
Leon Clarkef7060e22010-06-03 12:02:55 +01002625
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002626 context()->Plug(if_true, if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01002627}
2628
2629
2630void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2631 ASSERT(args->length() == 1);
2632
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002633 // ArgumentsAccessStub expects the key in rdx and the formal
2634 // parameter count in rax.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002635 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002636 __ movq(rdx, rax);
2637 __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2638 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2639 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002640 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002641}
2642
2643
2644void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2645 ASSERT(args->length() == 0);
2646
Ben Murdoch257744e2011-11-30 15:57:28 +00002647 Label exit;
Leon Clarkef7060e22010-06-03 12:02:55 +01002648 // Get the number of formal parameters.
2649 __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2650
2651 // Check if the calling frame is an arguments adaptor frame.
2652 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block44f0eee2011-05-26 01:26:41 +01002653 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2654 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Ben Murdoch257744e2011-11-30 15:57:28 +00002655 __ j(not_equal, &exit, Label::kNear);
Leon Clarkef7060e22010-06-03 12:02:55 +01002656
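  // Illustrative case: for `function f(a, b) {}` called as f(1, 2, 3), an
  // arguments adaptor frame exists and records the actual count 3, which
  // replaces the formal parameter count loaded above.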
2657 // Arguments adaptor case: Read the arguments length from the
2658 // adaptor frame.
2659 __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2660
2661 __ bind(&exit);
2662 if (FLAG_debug_code) __ AbortIfNotSmi(rax);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002663 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002664}
2665
2666
2667void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2668 ASSERT(args->length() == 1);
2669 Label done, null, function, non_function_constructor;
2670
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002671 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002672
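  // Expected results (illustrative): %_ClassOf([]) is "Array",
  // %_ClassOf(function() {}) is "Function", and primitives yield null,
  // mirroring the branches emitted below.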
2673 // If the object is a smi, we return null.
2674 __ JumpIfSmi(rax, &null);
2675
2676 // Check that the object is a JS object but take special care of JS
2677 // functions to make sure they have 'Function' as their class.
2678 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); // Map is now in rax.
2679 __ j(below, &null);
2680
2681 // As long as JS_FUNCTION_TYPE is the last instance type and it is
2682 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2683 // LAST_JS_OBJECT_TYPE.
2684 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2685 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2686 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
2687 __ j(equal, &function);
2688
2689 // Check if the constructor in the map is a function.
2690 __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
2691 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2692 __ j(not_equal, &non_function_constructor);
2693
2694 // rax now contains the constructor function. Grab the
2695 // instance class name from there.
2696 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2697 __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2698 __ jmp(&done);
2699
2700 // Functions have class 'Function'.
2701 __ bind(&function);
Steve Block44f0eee2011-05-26 01:26:41 +01002702 __ Move(rax, isolate()->factory()->function_class_symbol());
Leon Clarkef7060e22010-06-03 12:02:55 +01002703 __ jmp(&done);
2704
2705 // Objects with a non-function constructor have class 'Object'.
2706 __ bind(&non_function_constructor);
Steve Block44f0eee2011-05-26 01:26:41 +01002707 __ Move(rax, isolate()->factory()->Object_symbol());
Leon Clarkef7060e22010-06-03 12:02:55 +01002708 __ jmp(&done);
2709
2710 // Non-JS objects have class null.
2711 __ bind(&null);
2712 __ LoadRoot(rax, Heap::kNullValueRootIndex);
2713
2714 // All done.
2715 __ bind(&done);
2716
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002717 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002718}
2719
2720
2721void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2722 // Conditionally generate a log call.
2723 // Args:
2724 // 0 (literal string): The type of logging (corresponds to the flags).
2725 // This is used to determine whether or not to generate the log call.
2726 // 1 (string): Format string. Access the string at argument index 2
2727 // with '%2s' (see Logger::LogRuntime for all the formats).
2728 // 2 (array): Arguments to the format string.
2729 ASSERT_EQ(args->length(), 3);
2730#ifdef ENABLE_LOGGING_AND_PROFILING
2731 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002732 VisitForStackValue(args->at(1));
2733 VisitForStackValue(args->at(2));
Leon Clarkef7060e22010-06-03 12:02:55 +01002734 __ CallRuntime(Runtime::kLog, 2);
2735 }
2736#endif
2737 // Finally, we're expected to leave a value on the top of the stack.
2738 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002739 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002740}
2741
2742
2743void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2744 ASSERT(args->length() == 0);
2745
2746 Label slow_allocate_heapnumber;
2747 Label heapnumber_allocated;
2748
2749 __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
2750 __ jmp(&heapnumber_allocated);
2751
2752 __ bind(&slow_allocate_heapnumber);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002753 // Allocate a heap number.
2754 __ CallRuntime(Runtime::kNumberAlloc, 0);
Leon Clarkef7060e22010-06-03 12:02:55 +01002755 __ movq(rbx, rax);
2756
2757 __ bind(&heapnumber_allocated);
2758
2759 // Return a random uint32 number in rax.
2760 // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
Ben Murdoch8b112d22011-06-08 16:22:53 +01002761 __ PrepareCallCFunction(1);
2762#ifdef _WIN64
2763 __ LoadAddress(rcx, ExternalReference::isolate_address());
2764#else
2765 __ LoadAddress(rdi, ExternalReference::isolate_address());
2766#endif
2767 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01002768
2769 // Convert 32 random bits in rax to 0.(32 random bits) in a double
2770 // by computing:
 2771 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
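  // Worked example (illustrative): with random bits 0x80000000 the
  // constructed double is 2^20 + 0.5 = 1048576.5; subtracting 1.0 x 2^20
  // leaves 0.5, i.e. 0.(random bits) as intended.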
2772 __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2773 __ movd(xmm1, rcx);
2774 __ movd(xmm0, rax);
2775 __ cvtss2sd(xmm1, xmm1);
Ben Murdoch257744e2011-11-30 15:57:28 +00002776 __ xorps(xmm0, xmm1);
Leon Clarkef7060e22010-06-03 12:02:55 +01002777 __ subsd(xmm0, xmm1);
2778 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
2779
2780 __ movq(rax, rbx);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002781 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002782}
2783
2784
2785void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2786 // Load the arguments on the stack and call the stub.
2787 SubStringStub stub;
2788 ASSERT(args->length() == 3);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002789 VisitForStackValue(args->at(0));
2790 VisitForStackValue(args->at(1));
2791 VisitForStackValue(args->at(2));
Leon Clarkef7060e22010-06-03 12:02:55 +01002792 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002793 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002794}
2795
2796
2797void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
2798 // Load the arguments on the stack and call the stub.
2799 RegExpExecStub stub;
2800 ASSERT(args->length() == 4);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002801 VisitForStackValue(args->at(0));
2802 VisitForStackValue(args->at(1));
2803 VisitForStackValue(args->at(2));
2804 VisitForStackValue(args->at(3));
Leon Clarkef7060e22010-06-03 12:02:55 +01002805 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002806 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002807}
2808
2809
2810void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2811 ASSERT(args->length() == 1);
2812
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002813 VisitForAccumulatorValue(args->at(0)); // Load the object.
Leon Clarkef7060e22010-06-03 12:02:55 +01002814
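  // Illustrative semantics: %_ValueOf(new Number(3)) yields 3, while
  // non-wrapper inputs such as 3 or {} are returned unchanged.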
2815 Label done;
2816 // If the object is a smi return the object.
2817 __ JumpIfSmi(rax, &done);
2818 // If the object is not a value type, return the object.
2819 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
2820 __ j(not_equal, &done);
2821 __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
2822
2823 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002824 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002825}
2826
2827
2828void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2829 // Load the arguments on the stack and call the runtime function.
2830 ASSERT(args->length() == 2);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002831 VisitForStackValue(args->at(0));
2832 VisitForStackValue(args->at(1));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01002833 MathPowStub stub;
2834 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002835 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002836}
2837
2838
2839void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2840 ASSERT(args->length() == 2);
2841
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002842 VisitForStackValue(args->at(0)); // Load the object.
2843 VisitForAccumulatorValue(args->at(1)); // Load the value.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01002844 __ pop(rbx); // rax = value. rbx = object.
Leon Clarkef7060e22010-06-03 12:02:55 +01002845
2846 Label done;
2847 // If the object is a smi, return the value.
2848 __ JumpIfSmi(rbx, &done);
2849
2850 // If the object is not a value type, return the value.
2851 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
2852 __ j(not_equal, &done);
2853
2854 // Store the value.
2855 __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
2856 // Update the write barrier. Save the value as it will be
2857 // overwritten by the write barrier code and is needed afterward.
2858 __ movq(rdx, rax);
2859 __ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx);
2860
2861 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002862 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002863}
2864
2865
2866void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2867 ASSERT_EQ(args->length(), 1);
2868
2869 // Load the argument on the stack and call the stub.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002870 VisitForStackValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002871
2872 NumberToStringStub stub;
2873 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002874 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01002875}
2876
2877
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002878void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
Leon Clarkef7060e22010-06-03 12:02:55 +01002879 ASSERT(args->length() == 1);
2880
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002881 VisitForAccumulatorValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01002882
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002883 Label done;
2884 StringCharFromCodeGenerator generator(rax, rbx);
2885 generator.GenerateFast(masm_);
Leon Clarkef7060e22010-06-03 12:02:55 +01002886 __ jmp(&done);
2887
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002888 NopRuntimeCallHelper call_helper;
2889 generator.GenerateSlow(masm_, call_helper);
Leon Clarkef7060e22010-06-03 12:02:55 +01002890
2891 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002892 context()->Plug(rbx);
Leon Clarkef7060e22010-06-03 12:02:55 +01002893}
2894
2895
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002896void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
2897 ASSERT(args->length() == 2);
2898
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002899 VisitForStackValue(args->at(0));
2900 VisitForAccumulatorValue(args->at(1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002901
2902 Register object = rbx;
2903 Register index = rax;
2904 Register scratch = rcx;
2905 Register result = rdx;
2906
2907 __ pop(object);
2908
2909 Label need_conversion;
2910 Label index_out_of_range;
2911 Label done;
2912 StringCharCodeAtGenerator generator(object,
2913 index,
2914 scratch,
2915 result,
2916 &need_conversion,
2917 &need_conversion,
2918 &index_out_of_range,
2919 STRING_INDEX_IS_NUMBER);
2920 generator.GenerateFast(masm_);
2921 __ jmp(&done);
2922
2923 __ bind(&index_out_of_range);
2924 // When the index is out of range, the spec requires us to return
2925 // NaN.
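  // e.g. (illustrative) "abc".charCodeAt(5) evaluates to NaN.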
2926 __ LoadRoot(result, Heap::kNanValueRootIndex);
2927 __ jmp(&done);
2928
2929 __ bind(&need_conversion);
Leon Clarkef7060e22010-06-03 12:02:55 +01002930 // Move the undefined value into the result register, which will
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002931 // trigger conversion.
2932 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2933 __ jmp(&done);
2934
2935 NopRuntimeCallHelper call_helper;
2936 generator.GenerateSlow(masm_, call_helper);
2937
2938 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002939 context()->Plug(result);
Leon Clarkef7060e22010-06-03 12:02:55 +01002940}
2941
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002942
2943void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
2944 ASSERT(args->length() == 2);
2945
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002946 VisitForStackValue(args->at(0));
2947 VisitForAccumulatorValue(args->at(1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002948
2949 Register object = rbx;
2950 Register index = rax;
2951 Register scratch1 = rcx;
2952 Register scratch2 = rdx;
2953 Register result = rax;
2954
2955 __ pop(object);
2956
2957 Label need_conversion;
2958 Label index_out_of_range;
2959 Label done;
2960 StringCharAtGenerator generator(object,
2961 index,
2962 scratch1,
2963 scratch2,
2964 result,
2965 &need_conversion,
2966 &need_conversion,
2967 &index_out_of_range,
2968 STRING_INDEX_IS_NUMBER);
2969 generator.GenerateFast(masm_);
2970 __ jmp(&done);
2971
2972 __ bind(&index_out_of_range);
2973 // When the index is out of range, the spec requires us to return
2974 // the empty string.
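  // e.g. (illustrative) "abc".charAt(5) evaluates to "".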
2975 __ LoadRoot(result, Heap::kEmptyStringRootIndex);
2976 __ jmp(&done);
2977
2978 __ bind(&need_conversion);
2979 // Move smi zero into the result register, which will trigger
2980 // conversion.
2981 __ Move(result, Smi::FromInt(0));
2982 __ jmp(&done);
2983
2984 NopRuntimeCallHelper call_helper;
2985 generator.GenerateSlow(masm_, call_helper);
2986
2987 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002988 context()->Plug(result);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002989}
2990
2991
Leon Clarkef7060e22010-06-03 12:02:55 +01002992void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
2993 ASSERT_EQ(2, args->length());
2994
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002995 VisitForStackValue(args->at(0));
2996 VisitForStackValue(args->at(1));
Leon Clarkef7060e22010-06-03 12:02:55 +01002997
2998 StringAddStub stub(NO_STRING_ADD_FLAGS);
2999 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003000 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003001}
3002
3003
3004void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
3005 ASSERT_EQ(2, args->length());
3006
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003007 VisitForStackValue(args->at(0));
3008 VisitForStackValue(args->at(1));
Leon Clarkef7060e22010-06-03 12:02:55 +01003009
3010 StringCompareStub stub;
3011 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003012 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003013}
3014
3015
3016void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
3017 // Load the argument on the stack and call the stub.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003018 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3019 TranscendentalCacheStub::TAGGED);
Leon Clarkef7060e22010-06-03 12:02:55 +01003020 ASSERT(args->length() == 1);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003021 VisitForStackValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01003022 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003023 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003024}
3025
3026
3027void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
3028 // Load the argument on the stack and call the stub.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003029 TranscendentalCacheStub stub(TranscendentalCache::COS,
3030 TranscendentalCacheStub::TAGGED);
Leon Clarkef7060e22010-06-03 12:02:55 +01003031 ASSERT(args->length() == 1);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003032 VisitForStackValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01003033 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003034 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003035}
3036
3037
Ben Murdochb0fe1622011-05-05 13:52:32 +01003038void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
3039 // Load the argument on the stack and call the stub.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003040 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3041 TranscendentalCacheStub::TAGGED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003042 ASSERT(args->length() == 1);
3043 VisitForStackValue(args->at(0));
3044 __ CallStub(&stub);
3045 context()->Plug(rax);
3046}
3047
3048
Leon Clarkef7060e22010-06-03 12:02:55 +01003049void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
3050 // Load the argument on the stack and call the runtime function.
3051 ASSERT(args->length() == 1);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003052 VisitForStackValue(args->at(0));
Leon Clarkef7060e22010-06-03 12:02:55 +01003053 __ CallRuntime(Runtime::kMath_sqrt, 1);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003054 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003055}
3056
3057
3058void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
3059 ASSERT(args->length() >= 2);
3060
Ben Murdoch257744e2011-11-30 15:57:28 +00003061 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3062 for (int i = 0; i < arg_count + 1; i++) {
3063 VisitForStackValue(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01003064 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003065 VisitForAccumulatorValue(args->last()); // Function.
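  // Call shape implied by the pushes above (illustrative):
  //   %_CallFunction(receiver, arg1, ..., argN, fn)
  // invokes fn with `this` bound to receiver and arg1..argN as arguments.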
Leon Clarkef7060e22010-06-03 12:02:55 +01003066
Ben Murdoch257744e2011-11-30 15:57:28 +00003067 // InvokeFunction requires the function in rdi. Move it in there.
3068 __ movq(rdi, result_register());
Leon Clarkef7060e22010-06-03 12:02:55 +01003069 ParameterCount count(arg_count);
Ben Murdoch257744e2011-11-30 15:57:28 +00003070 __ InvokeFunction(rdi, count, CALL_FUNCTION,
3071 NullCallWrapper(), CALL_AS_METHOD);
Leon Clarkef7060e22010-06-03 12:02:55 +01003072 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003073 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003074}
3075
3076
3077void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
Ben Murdochb0fe1622011-05-05 13:52:32 +01003078 RegExpConstructResultStub stub;
Leon Clarkef7060e22010-06-03 12:02:55 +01003079 ASSERT(args->length() == 3);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003080 VisitForStackValue(args->at(0));
3081 VisitForStackValue(args->at(1));
3082 VisitForStackValue(args->at(2));
Ben Murdochb0fe1622011-05-05 13:52:32 +01003083 __ CallStub(&stub);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003084 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003085}
3086
3087
3088void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
3089 ASSERT(args->length() == 3);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003090 VisitForStackValue(args->at(0));
3091 VisitForStackValue(args->at(1));
3092 VisitForStackValue(args->at(2));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003093 Label done;
3094 Label slow_case;
3095 Register object = rax;
3096 Register index_1 = rbx;
3097 Register index_2 = rcx;
3098 Register elements = rdi;
3099 Register temp = rdx;
3100 __ movq(object, Operand(rsp, 2 * kPointerSize));
3101 // Fetch the map and check if array is in fast case.
3102 // Check that object doesn't require security checks and
3103 // has no indexed interceptor.
Steve Block44f0eee2011-05-26 01:26:41 +01003104 __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
3105 __ j(not_equal, &slow_case);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003106 __ testb(FieldOperand(temp, Map::kBitFieldOffset),
3107 Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
3108 __ j(not_zero, &slow_case);
3109
3110 // Check the object's elements are in fast case and writable.
3111 __ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
3112 __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
3113 Heap::kFixedArrayMapRootIndex);
3114 __ j(not_equal, &slow_case);
3115
3116 // Check that both indices are smis.
3117 __ movq(index_1, Operand(rsp, 1 * kPointerSize));
3118 __ movq(index_2, Operand(rsp, 0 * kPointerSize));
3119 __ JumpIfNotBothSmi(index_1, index_2, &slow_case);
3120
3121 // Check that both indices are valid.
3122 // The JSArray length field is a smi since the array is in fast case mode.
3123 __ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
3124 __ SmiCompare(temp, index_1);
3125 __ j(below_equal, &slow_case);
3126 __ SmiCompare(temp, index_2);
3127 __ j(below_equal, &slow_case);
3128
3129 __ SmiToInteger32(index_1, index_1);
3130 __ SmiToInteger32(index_2, index_2);
 3131 // Compute the addresses of the two elements into index_1 and index_2.
3132 __ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
3133 FixedArray::kHeaderSize));
3134 __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
3135 FixedArray::kHeaderSize));
3136
3137 // Swap elements. Use object and temp as scratch registers.
3138 __ movq(object, Operand(index_1, 0));
3139 __ movq(temp, Operand(index_2, 0));
3140 __ movq(Operand(index_2, 0), object);
3141 __ movq(Operand(index_1, 0), temp);
3142
3143 Label new_space;
3144 __ InNewSpace(elements, temp, equal, &new_space);
3145
3146 __ movq(object, elements);
3147 __ RecordWriteHelper(object, index_1, temp);
3148 __ RecordWriteHelper(elements, index_2, temp);
3149
3150 __ bind(&new_space);
3151 // We are done. Drop elements from the stack, and return undefined.
3152 __ addq(rsp, Immediate(3 * kPointerSize));
3153 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3154 __ jmp(&done);
3155
3156 __ bind(&slow_case);
Leon Clarkef7060e22010-06-03 12:02:55 +01003157 __ CallRuntime(Runtime::kSwapElements, 3);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003158
3159 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003160 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003161}
3162
3163
3164void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
3165 ASSERT_EQ(2, args->length());
3166
3167 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3168 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3169
3170 Handle<FixedArray> jsfunction_result_caches(
Steve Block44f0eee2011-05-26 01:26:41 +01003171 isolate()->global_context()->jsfunction_result_caches());
Leon Clarkef7060e22010-06-03 12:02:55 +01003172 if (jsfunction_result_caches->length() <= cache_id) {
3173 __ Abort("Attempt to use undefined cache.");
3174 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003175 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003176 return;
3177 }
3178
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003179 VisitForAccumulatorValue(args->at(1));
Leon Clarkef7060e22010-06-03 12:02:55 +01003180
3181 Register key = rax;
3182 Register cache = rbx;
3183 Register tmp = rcx;
Steve Block59151502010-09-22 15:07:15 +01003184 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
Leon Clarkef7060e22010-06-03 12:02:55 +01003185 __ movq(cache,
3186 FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3187 __ movq(cache,
Steve Block59151502010-09-22 15:07:15 +01003188 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
Leon Clarkef7060e22010-06-03 12:02:55 +01003189 __ movq(cache,
3190 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3191
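  // Cache layout as used below (inferred, illustrative): the cache is a
  // FixedArray of (key, value) pairs; the "finger" is a smi offset to the
  // key slot of the most recently used pair, whose value sits in the
  // following slot.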
Ben Murdoch257744e2011-11-30 15:57:28 +00003192 Label done, not_found;
Leon Clarkef7060e22010-06-03 12:02:55 +01003193 // Load the cache's finger offset (a smi) into tmp.
3194 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3195 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3196 SmiIndex index =
3197 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3198 __ cmpq(key, FieldOperand(cache,
3199 index.reg,
3200 index.scale,
3201 FixedArray::kHeaderSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00003202 __ j(not_equal, &not_found, Label::kNear);
Leon Clarkef7060e22010-06-03 12:02:55 +01003203 __ movq(rax, FieldOperand(cache,
3204 index.reg,
3205 index.scale,
3206 FixedArray::kHeaderSize + kPointerSize));
Ben Murdoch257744e2011-11-30 15:57:28 +00003207 __ jmp(&done, Label::kNear);
Leon Clarkef7060e22010-06-03 12:02:55 +01003208
3209 __ bind(&not_found);
3210 // Call runtime to perform the lookup.
3211 __ push(cache);
3212 __ push(key);
3213 __ CallRuntime(Runtime::kGetFromCache, 2);
3214
3215 __ bind(&done);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003216 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003217}
3218
3219
Ben Murdochbb769b22010-08-11 14:56:33 +01003220void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
3221 ASSERT_EQ(2, args->length());
3222
3223 Register right = rax;
3224 Register left = rbx;
3225 Register tmp = rcx;
3226
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003227 VisitForStackValue(args->at(0));
3228 VisitForAccumulatorValue(args->at(1));
Ben Murdochbb769b22010-08-11 14:56:33 +01003229 __ pop(left);
3230
Ben Murdoch257744e2011-11-30 15:57:28 +00003231 Label done, fail, ok;
Ben Murdochbb769b22010-08-11 14:56:33 +01003232 __ cmpq(left, right);
Ben Murdoch257744e2011-11-30 15:57:28 +00003233 __ j(equal, &ok, Label::kNear);
Ben Murdochbb769b22010-08-11 14:56:33 +01003234 // Fail if either is a non-HeapObject.
3235 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
Ben Murdoch257744e2011-11-30 15:57:28 +00003236 __ j(either_smi, &fail, Label::kNear);
3237 __ j(zero, &fail, Label::kNear);
Ben Murdochbb769b22010-08-11 14:56:33 +01003238 __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
3239 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
3240 Immediate(JS_REGEXP_TYPE));
Ben Murdoch257744e2011-11-30 15:57:28 +00003241 __ j(not_equal, &fail, Label::kNear);
Ben Murdochbb769b22010-08-11 14:56:33 +01003242 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003243 __ j(not_equal, &fail, Label::kNear);
Ben Murdochbb769b22010-08-11 14:56:33 +01003244 __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3245 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
Ben Murdoch257744e2011-11-30 15:57:28 +00003246 __ j(equal, &ok, Label::kNear);
Ben Murdochbb769b22010-08-11 14:56:33 +01003247 __ bind(&fail);
Steve Block44f0eee2011-05-26 01:26:41 +01003248 __ Move(rax, isolate()->factory()->false_value());
Ben Murdoch257744e2011-11-30 15:57:28 +00003249 __ jmp(&done, Label::kNear);
Ben Murdochbb769b22010-08-11 14:56:33 +01003250 __ bind(&ok);
Steve Block44f0eee2011-05-26 01:26:41 +01003251 __ Move(rax, isolate()->factory()->true_value());
Ben Murdochbb769b22010-08-11 14:56:33 +01003252 __ bind(&done);
3253
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003254 context()->Plug(rax);
Ben Murdochbb769b22010-08-11 14:56:33 +01003255}
3256
3257
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003258void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
3259 ASSERT(args->length() == 1);
3260
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003261 VisitForAccumulatorValue(args->at(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003262
3263 Label materialize_true, materialize_false;
3264 Label* if_true = NULL;
3265 Label* if_false = NULL;
3266 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003267 context()->PrepareTest(&materialize_true, &materialize_false,
3268 &if_true, &if_false, &fall_through);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003269
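  // Background (illustrative): a string that has been used as an array
  // index, e.g. "7", can cache that index in its hash field; the test
  // below is true exactly when such a cached index is present (the masked
  // bits are clear in that case).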
3270 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3271 Immediate(String::kContainsCachedArrayIndexMask));
Ben Murdoch086aeea2011-05-13 15:57:08 +01003272 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003273 __ j(zero, if_true);
3274 __ jmp(if_false);
3275
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003276 context()->Plug(if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003277}
3278
3279
3280void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
3281 ASSERT(args->length() == 1);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003282 VisitForAccumulatorValue(args->at(0));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003283
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003284 if (FLAG_debug_code) {
3285 __ AbortIfNotString(rax);
3286 }
3287
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003288 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3289 ASSERT(String::kHashShift >= kSmiTagSize);
3290 __ IndexFromHash(rax, rax);
3291
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003292 context()->Plug(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003293}
3294
3295
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003296void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
Steve Block44f0eee2011-05-26 01:26:41 +01003297 Label bailout, return_result, done, one_char_separator, long_separator,
3298 non_trivial_array, not_size_one_array, loop,
3299 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3300 ASSERT(args->length() == 2);
3301 // We will leave the separator on the stack until the end of the function.
3302 VisitForStackValue(args->at(1));
 3303 // Load this (the array) into rax.
3304 VisitForAccumulatorValue(args->at(0));
3305 // All aliases of the same register have disjoint lifetimes.
3306 Register array = rax;
3307 Register elements = no_reg; // Will be rax.
3308
3309 Register index = rdx;
3310
3311 Register string_length = rcx;
3312
3313 Register string = rsi;
3314
3315 Register scratch = rbx;
3316
3317 Register array_length = rdi;
3318 Register result_pos = no_reg; // Will be rdi.
3319
3320 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3321 Operand result_operand = Operand(rsp, 1 * kPointerSize);
3322 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3323 // Separator operand is already pushed. Make room for the two
3324 // other stack fields, and clear the direction flag in anticipation
3325 // of calling CopyBytes.
3326 __ subq(rsp, Immediate(2 * kPointerSize));
3327 __ cld();
 3328 // Check that the array is a JSArray.
3329 __ JumpIfSmi(array, &bailout);
3330 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3331 __ j(not_equal, &bailout);
3332
3333 // Check that the array has fast elements.
3334 __ testb(FieldOperand(scratch, Map::kBitField2Offset),
3335 Immediate(1 << Map::kHasFastElements));
3336 __ j(zero, &bailout);
3337
3338 // Array has fast elements, so its length must be a smi.
3339 // If the array has length zero, return the empty string.
3340 __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
3341 __ SmiCompare(array_length, Smi::FromInt(0));
3342 __ j(not_zero, &non_trivial_array);
3343 __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
3344 __ jmp(&return_result);
3345
3346 // Save the array length on the stack.
3347 __ bind(&non_trivial_array);
3348 __ SmiToInteger32(array_length, array_length);
3349 __ movl(array_length_operand, array_length);
3350
3351 // Save the FixedArray containing array's elements.
3352 // End of array's live range.
3353 elements = array;
3354 __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
3355 array = no_reg;
3356
3357
3358 // Check that all array elements are sequential ASCII strings, and
3359 // accumulate the sum of their lengths, as a smi-encoded value.
3360 __ Set(index, 0);
3361 __ Set(string_length, 0);
3362 // Loop condition: while (index < array_length).
3363 // Live loop registers: index(int32), array_length(int32), string(String*),
3364 // scratch, string_length(int32), elements(FixedArray*).
3365 if (FLAG_debug_code) {
3366 __ cmpq(index, array_length);
3367 __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
3368 }
3369 __ bind(&loop);
3370 __ movq(string, FieldOperand(elements,
3371 index,
3372 times_pointer_size,
3373 FixedArray::kHeaderSize));
3374 __ JumpIfSmi(string, &bailout);
3375 __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3376 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3377 __ andb(scratch, Immediate(
3378 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3379 __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3380 __ j(not_equal, &bailout);
3381 __ AddSmiField(string_length,
3382 FieldOperand(string, SeqAsciiString::kLengthOffset));
3383 __ j(overflow, &bailout);
3384 __ incl(index);
3385 __ cmpl(index, array_length);
3386 __ j(less, &loop);
3387
3388 // Live registers:
3389 // string_length: Sum of string lengths.
3390 // elements: FixedArray of strings.
3391 // index: Array length.
3392 // array_length: Array length.
3393
3394 // If array_length is 1, return elements[0], a string.
3395 __ cmpl(array_length, Immediate(1));
3396 __ j(not_equal, &not_size_one_array);
3397 __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3398 __ jmp(&return_result);
3399
3400 __ bind(&not_size_one_array);
3401
3402 // End of array_length live range.
3403 result_pos = array_length;
3404 array_length = no_reg;
3405
3406 // Live registers:
3407 // string_length: Sum of string lengths.
3408 // elements: FixedArray of strings.
3409 // index: Array length.
3410
3411 // Check that the separator is a sequential ASCII string.
3412 __ movq(string, separator_operand);
3413 __ JumpIfSmi(string, &bailout);
3414 __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3415 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3416 __ andb(scratch, Immediate(
3417 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3418 __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3419 __ j(not_equal, &bailout);
3420
3421 // Live registers:
3422 // string_length: Sum of string lengths.
3423 // elements: FixedArray of strings.
3424 // index: Array length.
3425 // string: Separator string.
3426
3427 // Add (separator length times (array_length - 1)) to string_length.
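  // Worked example (illustrative): joining ["a", "bb", "ccc"] with "--"
  // has element lengths summing to 6 plus 2 separators of length 2, so
  // string_length becomes 6 + 2 * 2 = 10 == "a--bb--ccc".length.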
3428 __ SmiToInteger32(scratch,
3429 FieldOperand(string, SeqAsciiString::kLengthOffset));
3430 __ decl(index);
3431 __ imull(scratch, index);
3432 __ j(overflow, &bailout);
3433 __ addl(string_length, scratch);
3434 __ j(overflow, &bailout);
3435
3436 // Live registers and stack values:
3437 // string_length: Total length of result string.
3438 // elements: FixedArray of strings.
3439 __ AllocateAsciiString(result_pos, string_length, scratch,
3440 index, string, &bailout);
3441 __ movq(result_operand, result_pos);
3442 __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3443
3444 __ movq(string, separator_operand);
3445 __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
3446 Smi::FromInt(1));
3447 __ j(equal, &one_char_separator);
3448 __ j(greater, &long_separator);
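  // Dispatch summary (illustrative): ["a", "b"].join("") uses the empty
  // separator loop, ["a", "b"].join(",") the one-character separator loop,
  // and ["a", "b"].join(", ") the long separator loop below.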
3449
3450
3451 // Empty separator case:
3452 __ Set(index, 0);
3453 __ movl(scratch, array_length_operand);
3454 __ jmp(&loop_1_condition);
3455 // Loop condition: while (index < array_length).
3456 __ bind(&loop_1);
3457 // Each iteration of the loop concatenates one string to the result.
3458 // Live values in registers:
3459 // index: which element of the elements array we are adding to the result.
3460 // result_pos: the position to which we are currently copying characters.
3461 // elements: the FixedArray of strings we are joining.
3462 // scratch: array length.
3463
3464 // Get string = array[index].
3465 __ movq(string, FieldOperand(elements, index,
3466 times_pointer_size,
3467 FixedArray::kHeaderSize));
3468 __ SmiToInteger32(string_length,
3469 FieldOperand(string, String::kLengthOffset));
3470 __ lea(string,
3471 FieldOperand(string, SeqAsciiString::kHeaderSize));
3472 __ CopyBytes(result_pos, string, string_length);
3473 __ incl(index);
3474 __ bind(&loop_1_condition);
3475 __ cmpl(index, scratch);
3476 __ j(less, &loop_1); // Loop while (index < array_length).
3477 __ jmp(&done);
3478
3479 // Generic bailout code used from several places.
3480 __ bind(&bailout);
3481 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3482 __ jmp(&return_result);
3483
3484
3485 // One-character separator case
3486 __ bind(&one_char_separator);
3487 // Get the separator ascii character value.
3488 // Register "string" holds the separator.
3489 __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3490 __ Set(index, 0);
3491 // Jump into the loop after the code that copies the separator, so the first
3492  // element is not preceded by a separator.
3493 __ jmp(&loop_2_entry);
3494 // Loop condition: while (index < length).
3495 __ bind(&loop_2);
3496 // Each iteration of the loop concatenates one string to the result.
3497 // Live values in registers:
3498 // elements: The FixedArray of strings we are joining.
3499 // index: which element of the elements array we are adding to the result.
3500 // result_pos: the position to which we are currently copying characters.
3501 // scratch: Separator character.
3502
3503 // Copy the separator character to the result.
3504 __ movb(Operand(result_pos, 0), scratch);
3505 __ incq(result_pos);
3506
3507 __ bind(&loop_2_entry);
3508 // Get string = array[index].
3509 __ movq(string, FieldOperand(elements, index,
3510 times_pointer_size,
3511 FixedArray::kHeaderSize));
3512 __ SmiToInteger32(string_length,
3513 FieldOperand(string, String::kLengthOffset));
3514 __ lea(string,
3515 FieldOperand(string, SeqAsciiString::kHeaderSize));
3516 __ CopyBytes(result_pos, string, string_length);
3517 __ incl(index);
3518 __ cmpl(index, array_length_operand);
3519 __ j(less, &loop_2); // End while (index < length).
3520 __ jmp(&done);
3521
3522
3523 // Long separator case (separator is more than one character).
3524 __ bind(&long_separator);
3525
3526 // Make elements point to end of elements array, and index
3527 // count from -array_length to zero, so we don't need to maintain
3528 // a loop limit.
3529 __ movl(index, array_length_operand);
3530 __ lea(elements, FieldOperand(elements, index, times_pointer_size,
3531 FixedArray::kHeaderSize));
3532 __ neg(index);
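  // Illustrative example (not from the original comments): for array_length
  // == 3, elements now points just past the last element of the FixedArray
  // payload and index is -3, so the loop below reads
  // Operand(elements, index, times_pointer_size, 0) for index = -3, -2, -1
  // and exits once incq brings index up to zero.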
3533
3534 // Replace separator string with pointer to its first character, and
3535  // make scratch hold its length.
3536 __ movq(string, separator_operand);
3537 __ SmiToInteger32(scratch,
3538 FieldOperand(string, String::kLengthOffset));
3539 __ lea(string,
3540 FieldOperand(string, SeqAsciiString::kHeaderSize));
3541 __ movq(separator_operand, string);
3542
3543 // Jump into the loop after the code that copies the separator, so the first
3544  // element is not preceded by a separator.
3545 __ jmp(&loop_3_entry);
3546  // Loop condition: while (index < 0).
3547 __ bind(&loop_3);
3548 // Each iteration of the loop concatenates one string to the result.
3549 // Live values in registers:
3550 // index: which element of the elements array we are adding to the result.
3551 // result_pos: the position to which we are currently copying characters.
3552 // scratch: Separator length.
3553 // separator_operand (rsp[0x10]): Address of first char of separator.
3554
3555 // Copy the separator to the result.
3556 __ movq(string, separator_operand);
3557 __ movl(string_length, scratch);
3558 __ CopyBytes(result_pos, string, string_length, 2);
3559
3560 __ bind(&loop_3_entry);
3561 // Get string = array[index].
3562 __ movq(string, Operand(elements, index, times_pointer_size, 0));
3563 __ SmiToInteger32(string_length,
3564 FieldOperand(string, String::kLengthOffset));
3565 __ lea(string,
3566 FieldOperand(string, SeqAsciiString::kHeaderSize));
3567 __ CopyBytes(result_pos, string, string_length);
3568 __ incq(index);
3569 __ j(not_equal, &loop_3); // Loop while (index < 0).
3570
3571 __ bind(&done);
3572 __ movq(rax, result_operand);
3573
3574 __ bind(&return_result);
3575 // Drop temp values from the stack, and restore context register.
3576 __ addq(rsp, Immediate(3 * kPointerSize));
3577 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3578 context()->Plug(rax);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003579}
3580
3581
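// VisitCallRuntime has three outcomes: names starting with '_' are expanded
// inline via EmitInlineRuntimeCall, expr->is_jsruntime() calls are dispatched
// through the JS builtins object with a call IC, and everything else becomes
// a direct call into the C++ runtime via CallRuntime.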
Leon Clarked91b9f72010-01-27 17:25:45 +00003582void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Leon Clarkef7060e22010-06-03 12:02:55 +01003583 Handle<String> name = expr->name();
3584 if (name->length() > 0 && name->Get(0) == '_') {
3585 Comment cmnt(masm_, "[ InlineRuntimeCall");
3586 EmitInlineRuntimeCall(expr);
3587 return;
3588 }
3589
Steve Block3ce2e202009-11-05 08:53:23 +00003590 Comment cmnt(masm_, "[ CallRuntime");
3591 ZoneList<Expression*>* args = expr->arguments();
Steve Block3ce2e202009-11-05 08:53:23 +00003592
Steve Blockd0582a62009-12-15 09:54:21 +00003593 if (expr->is_jsruntime()) {
3594 // Prepare for calling JS runtime function.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003595 __ movq(rax, GlobalObjectOperand());
Steve Blockd0582a62009-12-15 09:54:21 +00003596 __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
3597 }
Steve Block3ce2e202009-11-05 08:53:23 +00003598
3599 // Push the arguments ("left-to-right").
3600 int arg_count = args->length();
3601 for (int i = 0; i < arg_count; i++) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003602 VisitForStackValue(args->at(i));
Steve Block3ce2e202009-11-05 08:53:23 +00003603 }
3604
Steve Blockd0582a62009-12-15 09:54:21 +00003605 if (expr->is_jsruntime()) {
Andrei Popescu402d9372010-02-26 13:31:12 +00003606 // Call the JS runtime function using a call IC.
3607 __ Move(rcx, expr->name());
3608 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Ben Murdoch257744e2011-11-30 15:57:28 +00003609 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Steve Block44f0eee2011-05-26 01:26:41 +01003610 Handle<Code> ic =
Ben Murdoch257744e2011-11-30 15:57:28 +00003611 ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
3612 EmitCallIC(ic, mode, expr->id());
Steve Blockd0582a62009-12-15 09:54:21 +00003613 // Restore context register.
3614 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00003615 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00003616 __ CallRuntime(expr->function(), arg_count);
Steve Blockd0582a62009-12-15 09:54:21 +00003617 }
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003618 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00003619}
3620
3621
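// VisitUnaryOperation dispatches on the operator. The DELETE case below
// follows the language rules fairly directly: deleting a property invokes the
// DELETE builtin, deleting a non-global, non-dynamic variable simply yields
// false, and deleting a global goes through the builtin with kNonStrictMode
// (strict mode only ever gets here for 'delete this').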
Leon Clarked91b9f72010-01-27 17:25:45 +00003622void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00003623 switch (expr->op()) {
Leon Clarkef7060e22010-06-03 12:02:55 +01003624 case Token::DELETE: {
3625 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3626 Property* prop = expr->expression()->AsProperty();
3627 Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003628
3629 if (prop != NULL) {
Steve Block1e0659c2011-05-24 12:43:12 +01003630 if (prop->is_synthetic()) {
3631 // Result of deleting parameters is false, even when they rewrite
3632 // to accesses on the arguments object.
3633 context()->Plug(false);
3634 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003635 VisitForStackValue(prop->obj());
3636 VisitForStackValue(prop->key());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003637 __ Push(Smi::FromInt(strict_mode_flag()));
Steve Block1e0659c2011-05-24 12:43:12 +01003638 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3639 context()->Plug(rax);
Leon Clarkef7060e22010-06-03 12:02:55 +01003640 }
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003641 } else if (var != NULL) {
3642 // Delete of an unqualified identifier is disallowed in strict mode
3643 // but "delete this" is.
3644 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3645 if (var->is_global()) {
3646 __ push(GlobalObjectOperand());
3647 __ Push(var->name());
3648 __ Push(Smi::FromInt(kNonStrictMode));
3649 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3650 context()->Plug(rax);
3651 } else if (var->AsSlot() != NULL &&
3652 var->AsSlot()->type() != Slot::LOOKUP) {
3653 // Result of deleting non-global, non-dynamic variables is false.
3654 // The subexpression does not have side effects.
3655 context()->Plug(false);
3656 } else {
3657 // Non-global variable. Call the runtime to try to delete from the
3658 // context where the variable was introduced.
3659 __ push(context_register());
3660 __ Push(var->name());
3661 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3662 context()->Plug(rax);
3663 }
Steve Block1e0659c2011-05-24 12:43:12 +01003664 } else {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003665 // Result of deleting non-property, non-variable reference is true.
3666 // The subexpression may have side effects.
3667 VisitForEffect(expr->expression());
3668 context()->Plug(true);
Leon Clarkef7060e22010-06-03 12:02:55 +01003669 }
3670 break;
3671 }
3672
Steve Blockd0582a62009-12-15 09:54:21 +00003673 case Token::VOID: {
3674 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
Leon Clarkee46be812010-01-19 14:06:41 +00003675 VisitForEffect(expr->expression());
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003676 context()->Plug(Heap::kUndefinedValueRootIndex);
Steve Blockd0582a62009-12-15 09:54:21 +00003677 break;
3678 }
3679
3680 case Token::NOT: {
3681 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003682 if (context()->IsEffect()) {
3683 // Unary NOT has no side effects so it's only necessary to visit the
3684 // subexpression. Match the optimizing compiler by not branching.
3685 VisitForEffect(expr->expression());
3686 } else {
3687 Label materialize_true, materialize_false;
3688 Label* if_true = NULL;
3689 Label* if_false = NULL;
3690 Label* fall_through = NULL;
3691 // Notice that the labels are swapped.
3692 context()->PrepareTest(&materialize_true, &materialize_false,
3693 &if_false, &if_true, &fall_through);
3694 if (context()->IsTest()) ForwardBailoutToChild(expr);
3695 VisitForControl(expr->expression(), if_true, if_false, fall_through);
3696 context()->Plug(if_false, if_true); // Labels swapped.
3697 }
Steve Blockd0582a62009-12-15 09:54:21 +00003698 break;
3699 }
3700
3701 case Token::TYPEOF: {
3702 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003703 { StackValueContext context(this);
3704 VisitForTypeofValue(expr->expression());
3705 }
Steve Blockd0582a62009-12-15 09:54:21 +00003706 __ CallRuntime(Runtime::kTypeof, 1);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003707 context()->Plug(rax);
Steve Blockd0582a62009-12-15 09:54:21 +00003708 break;
3709 }
3710
Leon Clarked91b9f72010-01-27 17:25:45 +00003711 case Token::ADD: {
3712 Comment cmt(masm_, "[ UnaryOperation (ADD)");
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003713 VisitForAccumulatorValue(expr->expression());
Ben Murdochb0fe1622011-05-05 13:52:32 +01003714 Label no_conversion;
Leon Clarke4515c472010-02-03 11:58:03 +00003715 Condition is_smi = masm_->CheckSmi(result_register());
Leon Clarked91b9f72010-01-27 17:25:45 +00003716 __ j(is_smi, &no_conversion);
Steve Block1e0659c2011-05-24 12:43:12 +01003717 ToNumberStub convert_stub;
3718 __ CallStub(&convert_stub);
Leon Clarked91b9f72010-01-27 17:25:45 +00003719 __ bind(&no_conversion);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003720 context()->Plug(result_register());
Leon Clarked91b9f72010-01-27 17:25:45 +00003721 break;
3722 }
3723
Ben Murdoch257744e2011-11-30 15:57:28 +00003724 case Token::SUB:
3725 EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
Leon Clarke4515c472010-02-03 11:58:03 +00003726 break;
Leon Clarke4515c472010-02-03 11:58:03 +00003727
Ben Murdoch257744e2011-11-30 15:57:28 +00003728 case Token::BIT_NOT:
3729 EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
Leon Clarke4515c472010-02-03 11:58:03 +00003730 break;
Leon Clarke4515c472010-02-03 11:58:03 +00003731
Steve Blockd0582a62009-12-15 09:54:21 +00003732 default:
3733 UNREACHABLE();
Steve Block3ce2e202009-11-05 08:53:23 +00003734 }
3735}
3736
3737
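// Shared helper for the SUB and BIT_NOT cases above: both unary minus and
// bitwise NOT (-x and ~x at the JS level) go through a UnaryOpStub with the
// operand in the accumulator register rax.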
Ben Murdoch257744e2011-11-30 15:57:28 +00003738void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3739 const char* comment) {
3740 // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3741 Comment cmt(masm_, comment);
3742 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3743 UnaryOverwriteMode overwrite =
3744 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3745 UnaryOpStub stub(expr->op(), overwrite);
3746 // UnaryOpStub expects the argument to be in the
3747 // accumulator register rax.
3748 VisitForAccumulatorValue(expr->expression());
3749 SetSourcePosition(expr->position());
3750 EmitCallIC(stub.GetCode(), NULL, expr->id());
3751 context()->Plug(rax);
3752}
3753
3754
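// VisitCountOperation compiles ++ and -- in prefix and postfix form. A rough
// JS-level sketch of what the emitted code has to get right (illustrative
// only):
//
//   y = x++   -->  old = ToNumber(x); x = old + 1; y = old;
//   y = ++x   -->  x = ToNumber(x) + 1; y = x;
//
// which is why the postfix-on-property path below reserves a stack slot
// (the Push(Smi::FromInt(0))) for the old value before the operand is loaded.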
Leon Clarked91b9f72010-01-27 17:25:45 +00003755void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Leon Clarkee46be812010-01-19 14:06:41 +00003756 Comment cmnt(masm_, "[ CountOperation");
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003757 SetSourcePosition(expr->position());
Leon Clarkee46be812010-01-19 14:06:41 +00003758
Leon Clarkef7060e22010-06-03 12:02:55 +01003759 // Invalid left-hand-sides are rewritten to have a 'throw
3760 // ReferenceError' as the left-hand side.
3761 if (!expr->expression()->IsValidLeftHandSide()) {
3762 VisitForEffect(expr->expression());
3763 return;
3764 }
3765
Leon Clarkee46be812010-01-19 14:06:41 +00003766 // Expression can only be a property, a global or a (parameter or local)
3767 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
3768 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3769 LhsKind assign_type = VARIABLE;
3770 Property* prop = expr->expression()->AsProperty();
3771 // In case of a property we use the uninitialized expression context
3772 // of the key to detect a named property.
3773 if (prop != NULL) {
3774 assign_type =
3775 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3776 }
3777
3778 // Evaluate expression and get value.
3779 if (assign_type == VARIABLE) {
3780 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003781 AccumulatorValueContext context(this);
3782 EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
Leon Clarkef7060e22010-06-03 12:02:55 +01003783 } else {
Leon Clarkee46be812010-01-19 14:06:41 +00003784 // Reserve space for result of postfix operation.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003785 if (expr->is_postfix() && !context()->IsEffect()) {
Leon Clarkee46be812010-01-19 14:06:41 +00003786 __ Push(Smi::FromInt(0));
3787 }
Leon Clarkee46be812010-01-19 14:06:41 +00003788 if (assign_type == NAMED_PROPERTY) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003789 VisitForAccumulatorValue(prop->obj());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003790 __ push(rax); // Copy of receiver, needed for later store.
Leon Clarkee46be812010-01-19 14:06:41 +00003791 EmitNamedPropertyLoad(prop);
3792 } else {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003793 if (prop->is_arguments_access()) {
3794 VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3795 MemOperand slot_operand =
3796 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
3797 __ push(slot_operand);
3798 __ Move(rax, prop->key()->AsLiteral()->handle());
3799 } else {
3800 VisitForStackValue(prop->obj());
3801 VisitForAccumulatorValue(prop->key());
3802 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003803    __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack.
3804 __ push(rax); // Copy of key, needed for later store.
Leon Clarkee46be812010-01-19 14:06:41 +00003805 EmitKeyedPropertyLoad(prop);
3806 }
Leon Clarkee46be812010-01-19 14:06:41 +00003807 }
3808
Ben Murdoch086aeea2011-05-13 15:57:08 +01003809 // We need a second deoptimization point after loading the value
3810  // in case evaluating the property load may have a side effect.
Ben Murdoch8b112d22011-06-08 16:22:53 +01003811 if (assign_type == VARIABLE) {
3812 PrepareForBailout(expr->expression(), TOS_REG);
3813 } else {
3814 PrepareForBailoutForId(expr->CountId(), TOS_REG);
3815 }
Ben Murdoch086aeea2011-05-13 15:57:08 +01003816
Leon Clarked91b9f72010-01-27 17:25:45 +00003817 // Call ToNumber only if operand is not a smi.
Ben Murdoch257744e2011-11-30 15:57:28 +00003818 Label no_conversion;
Leon Clarked91b9f72010-01-27 17:25:45 +00003819 Condition is_smi;
3820 is_smi = masm_->CheckSmi(rax);
Ben Murdoch257744e2011-11-30 15:57:28 +00003821 __ j(is_smi, &no_conversion, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01003822 ToNumberStub convert_stub;
3823 __ CallStub(&convert_stub);
Leon Clarked91b9f72010-01-27 17:25:45 +00003824 __ bind(&no_conversion);
Leon Clarkee46be812010-01-19 14:06:41 +00003825
3826 // Save result for postfix expressions.
3827 if (expr->is_postfix()) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003828 if (!context()->IsEffect()) {
3829 // Save the result on the stack. If we have a named or keyed property
3830 // we store the result under the receiver that is currently on top
3831 // of the stack.
3832 switch (assign_type) {
3833 case VARIABLE:
3834 __ push(rax);
3835 break;
3836 case NAMED_PROPERTY:
3837 __ movq(Operand(rsp, kPointerSize), rax);
3838 break;
3839 case KEYED_PROPERTY:
3840 __ movq(Operand(rsp, 2 * kPointerSize), rax);
3841 break;
3842 }
Leon Clarkee46be812010-01-19 14:06:41 +00003843 }
3844 }
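  // At this point the (ToNumber-converted) old value sits either on top of
  // the stack or tucked under the receiver/key copies, which is the layout
  // the stores at the end of this function rely on; e.g. for a postfix
  // 'a.b++' that saved old value is what the whole expression produces.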
3845
Leon Clarked91b9f72010-01-27 17:25:45 +00003846 // Inline smi case if we are in a loop.
Ben Murdoch257744e2011-11-30 15:57:28 +00003847 Label done, stub_call;
Steve Block1e0659c2011-05-24 12:43:12 +01003848 JumpPatchSite patch_site(masm_);
3849
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003850 if (ShouldInlineSmiCase(expr->op())) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003851 if (expr->op() == Token::INC) {
3852 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3853 } else {
3854 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3855 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003856 __ j(overflow, &stub_call, Label::kNear);
Leon Clarked91b9f72010-01-27 17:25:45 +00003857 // We could eliminate this smi check if we split the code at
3858 // the first smi check before calling ToNumber.
Ben Murdoch257744e2011-11-30 15:57:28 +00003859 patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01003860
Leon Clarked91b9f72010-01-27 17:25:45 +00003861 __ bind(&stub_call);
3862 // Call stub. Undo operation first.
3863 if (expr->op() == Token::INC) {
3864 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3865 } else {
3866 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3867 }
3868 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01003869
3870 // Record position before stub call.
3871 SetSourcePosition(expr->position());
3872
Leon Clarkee46be812010-01-19 14:06:41 +00003873 // Call stub for +1/-1.
Ben Murdoch257744e2011-11-30 15:57:28 +00003874 BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
Steve Block1e0659c2011-05-24 12:43:12 +01003875 if (expr->op() == Token::INC) {
3876 __ Move(rdx, Smi::FromInt(1));
3877 } else {
3878 __ movq(rdx, rax);
3879 __ Move(rax, Smi::FromInt(1));
3880 }
Ben Murdoch257744e2011-11-30 15:57:28 +00003881 EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
Leon Clarked91b9f72010-01-27 17:25:45 +00003882 __ bind(&done);
Leon Clarkee46be812010-01-19 14:06:41 +00003883
3884 // Store the value returned in rax.
3885 switch (assign_type) {
3886 case VARIABLE:
3887 if (expr->is_postfix()) {
Leon Clarkef7060e22010-06-03 12:02:55 +01003888 // Perform the assignment as if via '='.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003889 { EffectContext context(this);
3890 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3891 Token::ASSIGN);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003892 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003893 context.Plug(rax);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003894 }
Leon Clarkee46be812010-01-19 14:06:41 +00003895 // For all contexts except kEffect: We have the result on
3896 // top of the stack.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003897 if (!context()->IsEffect()) {
3898 context()->PlugTOS();
Leon Clarkee46be812010-01-19 14:06:41 +00003899 }
3900 } else {
Leon Clarkef7060e22010-06-03 12:02:55 +01003901 // Perform the assignment as if via '='.
Leon Clarkee46be812010-01-19 14:06:41 +00003902 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003903 Token::ASSIGN);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003904 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
Ben Murdochb0fe1622011-05-05 13:52:32 +01003905 context()->Plug(rax);
Leon Clarkee46be812010-01-19 14:06:41 +00003906 }
3907 break;
3908 case NAMED_PROPERTY: {
3909 __ Move(rcx, prop->key()->AsLiteral()->handle());
Leon Clarke4515c472010-02-03 11:58:03 +00003910 __ pop(rdx);
Steve Block44f0eee2011-05-26 01:26:41 +01003911 Handle<Code> ic = is_strict_mode()
3912 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3913 : isolate()->builtins()->StoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00003914 EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
Ben Murdoch086aeea2011-05-13 15:57:08 +01003915 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
Leon Clarkee46be812010-01-19 14:06:41 +00003916 if (expr->is_postfix()) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003917 if (!context()->IsEffect()) {
3918 context()->PlugTOS();
Leon Clarkee46be812010-01-19 14:06:41 +00003919 }
3920 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003921 context()->Plug(rax);
Leon Clarkee46be812010-01-19 14:06:41 +00003922 }
3923 break;
3924 }
3925 case KEYED_PROPERTY: {
Leon Clarkef7060e22010-06-03 12:02:55 +01003926 __ pop(rcx);
3927 __ pop(rdx);
Steve Block44f0eee2011-05-26 01:26:41 +01003928 Handle<Code> ic = is_strict_mode()
3929 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3930 : isolate()->builtins()->KeyedStoreIC_Initialize();
Ben Murdoch257744e2011-11-30 15:57:28 +00003931 EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
Ben Murdoch086aeea2011-05-13 15:57:08 +01003932 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
Leon Clarkee46be812010-01-19 14:06:41 +00003933 if (expr->is_postfix()) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003934 if (!context()->IsEffect()) {
3935 context()->PlugTOS();
Leon Clarkee46be812010-01-19 14:06:41 +00003936 }
3937 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003938 context()->Plug(rax);
Leon Clarkee46be812010-01-19 14:06:41 +00003939 }
3940 break;
3941 }
3942 }
3943}
3944
Steve Block3ce2e202009-11-05 08:53:23 +00003945
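// VisitForTypeofValue loads a value for use as the operand of typeof. The
// special cases below exist because typeof must not throw for unresolvable
// references -- e.g. 'typeof someUndeclaredGlobal' should yield "undefined"
// rather than a ReferenceError -- hence the non-contextual global load and
// the kLoadContextSlotNoReferenceError runtime call.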
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003946void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003947 VariableProxy* proxy = expr->AsVariableProxy();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003948 ASSERT(!context()->IsEffect());
3949 ASSERT(!context()->IsTest());
3950
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003951 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
3952 Comment cmnt(masm_, "Global variable");
3953 __ Move(rcx, proxy->name());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003954 __ movq(rax, GlobalObjectOperand());
Steve Block44f0eee2011-05-26 01:26:41 +01003955 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003956 // Use a regular load, not a contextual load, to avoid a reference
3957 // error.
Ben Murdoch257744e2011-11-30 15:57:28 +00003958 EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003959 PrepareForBailout(expr, TOS_REG);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003960 context()->Plug(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003961 } else if (proxy != NULL &&
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003962 proxy->var()->AsSlot() != NULL &&
3963 proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
Steve Block59151502010-09-22 15:07:15 +01003964 Label done, slow;
3965
3966 // Generate code for loading from variables potentially shadowed
3967 // by eval-introduced variables.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003968 Slot* slot = proxy->var()->AsSlot();
Steve Block59151502010-09-22 15:07:15 +01003969 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
3970
3971 __ bind(&slow);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003972 __ push(rsi);
3973 __ Push(proxy->name());
3974 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
Ben Murdoch086aeea2011-05-13 15:57:08 +01003975 PrepareForBailout(expr, TOS_REG);
Steve Block59151502010-09-22 15:07:15 +01003976 __ bind(&done);
3977
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003978 context()->Plug(rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003979 } else {
3980 // This expression cannot throw a reference error at the top level.
Ben Murdoch086aeea2011-05-13 15:57:08 +01003981 context()->HandleExpression(expr);
Steve Block3ce2e202009-11-05 08:53:23 +00003982 }
Steve Block3ce2e202009-11-05 08:53:23 +00003983}
3984
3985
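// TryLiteralCompare recognizes the common pattern
//
//   typeof <expression> == "number"   // or ===, with any string literal
//
// and branches directly on the map/instance-type checks below instead of
// materializing a boolean. If the comparison does not match the pattern it
// returns false and the generic path in VisitCompareOperation is used.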
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003986bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
3987 Expression* left,
3988 Expression* right,
3989 Label* if_true,
3990 Label* if_false,
3991 Label* fall_through) {
3992 if (op != Token::EQ && op != Token::EQ_STRICT) return false;
3993
3994 // Check for the pattern: typeof <expression> == <string literal>.
3995 Literal* right_literal = right->AsLiteral();
3996 if (right_literal == NULL) return false;
3997 Handle<Object> right_literal_value = right_literal->handle();
3998 if (!right_literal_value->IsString()) return false;
3999 UnaryOperation* left_unary = left->AsUnaryOperation();
4000 if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
4001 Handle<String> check = Handle<String>::cast(right_literal_value);
4002
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004003 { AccumulatorValueContext context(this);
4004 VisitForTypeofValue(left_unary->expression());
4005 }
Ben Murdoch086aeea2011-05-13 15:57:08 +01004006 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004007
Steve Block44f0eee2011-05-26 01:26:41 +01004008 if (check->Equals(isolate()->heap()->number_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004009 __ JumpIfSmi(rax, if_true);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004010 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
4011 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4012 Split(equal, if_true, if_false, fall_through);
Steve Block44f0eee2011-05-26 01:26:41 +01004013 } else if (check->Equals(isolate()->heap()->string_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004014 __ JumpIfSmi(rax, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004015 // Check for undetectable objects => false.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004016 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4017 __ j(above_equal, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004018 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
Leon Clarkef7060e22010-06-03 12:02:55 +01004019 Immediate(1 << Map::kIsUndetectable));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004020 Split(zero, if_true, if_false, fall_through);
Steve Block44f0eee2011-05-26 01:26:41 +01004021 } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004022 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4023 __ j(equal, if_true);
4024 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4025 Split(equal, if_true, if_false, fall_through);
Steve Block44f0eee2011-05-26 01:26:41 +01004026 } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004027 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4028 __ j(equal, if_true);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004029 __ JumpIfSmi(rax, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004030 // Check for undetectable objects => true.
4031 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4032 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4033 Immediate(1 << Map::kIsUndetectable));
4034 Split(not_zero, if_true, if_false, fall_through);
Steve Block44f0eee2011-05-26 01:26:41 +01004035 } else if (check->Equals(isolate()->heap()->function_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004036 __ JumpIfSmi(rax, if_false);
4037 __ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
4038 Split(above_equal, if_true, if_false, fall_through);
Steve Block44f0eee2011-05-26 01:26:41 +01004039 } else if (check->Equals(isolate()->heap()->object_symbol())) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004040 __ JumpIfSmi(rax, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004041 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4042 __ j(equal, if_true);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004043 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdx);
4044 __ j(below, if_false);
4045 __ CmpInstanceType(rdx, FIRST_FUNCTION_CLASS_TYPE);
4046 __ j(above_equal, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004047 // Check for undetectable objects => false.
4048 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4049 Immediate(1 << Map::kIsUndetectable));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004050 Split(zero, if_true, if_false, fall_through);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004051 } else {
4052 if (if_false != fall_through) __ jmp(if_false);
Leon Clarkef7060e22010-06-03 12:02:55 +01004053 }
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004054
4055 return true;
Leon Clarkef7060e22010-06-03 12:02:55 +01004056}
4057
4058
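// VisitCompareOperation: after the literal-compare fast path above fails, the
// generic path pushes the left operand, selects a condition code (swapping
// operands for > and <= so only 'less'/'greater_equal' are needed), optionally
// inlines a smi-smi comparison, and otherwise calls the generic compare IC.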
Leon Clarked91b9f72010-01-27 17:25:45 +00004059void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00004060 Comment cmnt(masm_, "[ CompareOperation");
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004061 SetSourcePosition(expr->position());
Steve Blockd0582a62009-12-15 09:54:21 +00004062
Leon Clarkee46be812010-01-19 14:06:41 +00004063 // Always perform the comparison for its control flow. Pack the result
4064 // into the expression's context after the comparison is performed.
Leon Clarkef7060e22010-06-03 12:02:55 +01004065 Label materialize_true, materialize_false;
4066 Label* if_true = NULL;
4067 Label* if_false = NULL;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004068 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004069 context()->PrepareTest(&materialize_true, &materialize_false,
4070 &if_true, &if_false, &fall_through);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004071
4072 // First we try a fast inlined version of the compare when one of
4073 // the operands is a literal.
4074 Token::Value op = expr->op();
4075 Expression* left = expr->left();
4076 Expression* right = expr->right();
4077 if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004078 context()->Plug(if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004079 return;
4080 }
Steve Blockd0582a62009-12-15 09:54:21 +00004081
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004082 VisitForStackValue(expr->left());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004083 switch (op) {
Leon Clarkee46be812010-01-19 14:06:41 +00004084 case Token::IN:
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004085 VisitForStackValue(expr->right());
Steve Blockd0582a62009-12-15 09:54:21 +00004086 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004087 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
Steve Blockd0582a62009-12-15 09:54:21 +00004088 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004089 Split(equal, if_true, if_false, fall_through);
Steve Blockd0582a62009-12-15 09:54:21 +00004090 break;
Steve Blockd0582a62009-12-15 09:54:21 +00004091
4092 case Token::INSTANCEOF: {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004093 VisitForStackValue(expr->right());
Ben Murdochb0fe1622011-05-05 13:52:32 +01004094 InstanceofStub stub(InstanceofStub::kNoFlags);
Steve Blockd0582a62009-12-15 09:54:21 +00004095 __ CallStub(&stub);
Ben Murdoch086aeea2011-05-13 15:57:08 +01004096 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Steve Blockd0582a62009-12-15 09:54:21 +00004097 __ testq(rax, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004098 // The stub returns 0 for true.
4099 Split(zero, if_true, if_false, fall_through);
Steve Blockd0582a62009-12-15 09:54:21 +00004100 break;
4101 }
4102
4103 default: {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004104 VisitForAccumulatorValue(expr->right());
Steve Blockd0582a62009-12-15 09:54:21 +00004105 Condition cc = no_condition;
4106 bool strict = false;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004107 switch (op) {
Steve Blockd0582a62009-12-15 09:54:21 +00004108 case Token::EQ_STRICT:
4109 strict = true;
Leon Clarkee46be812010-01-19 14:06:41 +00004110 // Fall through.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004111 case Token::EQ:
Steve Blockd0582a62009-12-15 09:54:21 +00004112 cc = equal;
Steve Blockd0582a62009-12-15 09:54:21 +00004113 __ pop(rdx);
4114 break;
4115 case Token::LT:
4116 cc = less;
Steve Blockd0582a62009-12-15 09:54:21 +00004117 __ pop(rdx);
4118 break;
4119 case Token::GT:
4120          // Reverse left and right sides to obtain ECMA-262 conversion order.
4121 cc = less;
Leon Clarkee46be812010-01-19 14:06:41 +00004122 __ movq(rdx, result_register());
Steve Blockd0582a62009-12-15 09:54:21 +00004123 __ pop(rax);
4124 break;
4125 case Token::LTE:
4126          // Reverse left and right sides to obtain ECMA-262 conversion order.
4127 cc = greater_equal;
Leon Clarkee46be812010-01-19 14:06:41 +00004128 __ movq(rdx, result_register());
Steve Blockd0582a62009-12-15 09:54:21 +00004129 __ pop(rax);
4130 break;
4131 case Token::GTE:
4132 cc = greater_equal;
Steve Blockd0582a62009-12-15 09:54:21 +00004133 __ pop(rdx);
4134 break;
4135 case Token::IN:
4136 case Token::INSTANCEOF:
4137 default:
4138 UNREACHABLE();
4139 }
4140
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004141 bool inline_smi_code = ShouldInlineSmiCase(op);
Steve Block1e0659c2011-05-24 12:43:12 +01004142 JumpPatchSite patch_site(masm_);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004143 if (inline_smi_code) {
Ben Murdoch257744e2011-11-30 15:57:28 +00004144 Label slow_case;
Steve Block1e0659c2011-05-24 12:43:12 +01004145 __ movq(rcx, rdx);
4146 __ or_(rcx, rax);
Ben Murdoch257744e2011-11-30 15:57:28 +00004147 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
Steve Block1e0659c2011-05-24 12:43:12 +01004148 __ cmpq(rdx, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004149 Split(cc, if_true, if_false, NULL);
4150 __ bind(&slow_case);
4151 }
Steve Blockd0582a62009-12-15 09:54:21 +00004152
Steve Block1e0659c2011-05-24 12:43:12 +01004153 // Record position and call the compare IC.
4154 SetSourcePosition(expr->position());
4155 Handle<Code> ic = CompareIC::GetUninitialized(op);
Ben Murdoch257744e2011-11-30 15:57:28 +00004156 EmitCallIC(ic, &patch_site, expr->id());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004157
4158 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Steve Blockd0582a62009-12-15 09:54:21 +00004159 __ testq(rax, rax);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004160 Split(cc, if_true, if_false, fall_through);
Steve Blockd0582a62009-12-15 09:54:21 +00004161 }
4162 }
4163
Leon Clarkee46be812010-01-19 14:06:41 +00004164 // Convert the result of the comparison into one expected for this
4165 // expression's context.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004166 context()->Plug(if_true, if_false);
Steve Blockd0582a62009-12-15 09:54:21 +00004167}
4168
4169
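// VisitCompareToNull handles comparisons against the null literal. JS-level
// sketch of the two flavours compiled below (illustrative only):
//
//   x === null   -->  true only for null itself.
//   x == null    -->  true for null, undefined, and undetectable objects.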
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004170void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
4171 Comment cmnt(masm_, "[ CompareToNull");
4172 Label materialize_true, materialize_false;
4173 Label* if_true = NULL;
4174 Label* if_false = NULL;
4175 Label* fall_through = NULL;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004176 context()->PrepareTest(&materialize_true, &materialize_false,
4177 &if_true, &if_false, &fall_through);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004178
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004179 VisitForAccumulatorValue(expr->expression());
Ben Murdoch086aeea2011-05-13 15:57:08 +01004180 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004181 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4182 if (expr->is_strict()) {
4183 Split(equal, if_true, if_false, fall_through);
4184 } else {
4185 __ j(equal, if_true);
4186 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4187 __ j(equal, if_true);
4188 Condition is_smi = masm_->CheckSmi(rax);
4189 __ j(is_smi, if_false);
4190 // It can be an undetectable object.
4191 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4192 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4193 Immediate(1 << Map::kIsUndetectable));
4194 Split(not_zero, if_true, if_false, fall_through);
4195 }
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004196 context()->Plug(if_true, if_false);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004197}
4198
4199
Leon Clarked91b9f72010-01-27 17:25:45 +00004200void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
Steve Blockd0582a62009-12-15 09:54:21 +00004201 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004202 context()->Plug(rax);
Leon Clarkee46be812010-01-19 14:06:41 +00004203}
4204
4205
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004206Register FullCodeGenerator::result_register() {
4207 return rax;
4208}
Leon Clarkee46be812010-01-19 14:06:41 +00004209
4210
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004211Register FullCodeGenerator::context_register() {
4212 return rsi;
4213}
4214
4215
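// The two EmitCallIC overloads below bump a per-IC-kind *_full counter and
// then emit the call, passing the AST id through with the relocation info.
// The patch-site variant additionally emits the inlined-smi-code patch info,
// or a single nop to signal that no inlined code was generated.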
Ben Murdoch257744e2011-11-30 15:57:28 +00004216void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4217 RelocInfo::Mode mode,
4218 unsigned ast_id) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004219 ASSERT(mode == RelocInfo::CODE_TARGET ||
4220 mode == RelocInfo::CODE_TARGET_CONTEXT);
Steve Block44f0eee2011-05-26 01:26:41 +01004221 Counters* counters = isolate()->counters();
Steve Block1e0659c2011-05-24 12:43:12 +01004222 switch (ic->kind()) {
4223 case Code::LOAD_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004224 __ IncrementCounter(counters->named_load_full(), 1);
Steve Block1e0659c2011-05-24 12:43:12 +01004225 break;
4226 case Code::KEYED_LOAD_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004227 __ IncrementCounter(counters->keyed_load_full(), 1);
Steve Block1e0659c2011-05-24 12:43:12 +01004228 break;
4229 case Code::STORE_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004230 __ IncrementCounter(counters->named_store_full(), 1);
Steve Block1e0659c2011-05-24 12:43:12 +01004231 break;
4232 case Code::KEYED_STORE_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004233 __ IncrementCounter(counters->keyed_store_full(), 1);
Steve Block1e0659c2011-05-24 12:43:12 +01004234 default:
4235 break;
4236 }
Ben Murdoch257744e2011-11-30 15:57:28 +00004237 __ call(ic, mode, ast_id);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004238}
Leon Clarkee46be812010-01-19 14:06:41 +00004239
4240
Ben Murdoch257744e2011-11-30 15:57:28 +00004241void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
4242 JumpPatchSite* patch_site,
4243 unsigned ast_id) {
Steve Block44f0eee2011-05-26 01:26:41 +01004244 Counters* counters = isolate()->counters();
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004245 switch (ic->kind()) {
4246 case Code::LOAD_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004247 __ IncrementCounter(counters->named_load_full(), 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004248 break;
4249 case Code::KEYED_LOAD_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004250 __ IncrementCounter(counters->keyed_load_full(), 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004251 break;
4252 case Code::STORE_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004253 __ IncrementCounter(counters->named_store_full(), 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004254 break;
4255 case Code::KEYED_STORE_IC:
Steve Block44f0eee2011-05-26 01:26:41 +01004256 __ IncrementCounter(counters->keyed_store_full(), 1);
Ben Murdoche0cee9b2011-05-25 10:26:03 +01004257 default:
4258 break;
4259 }
Ben Murdoch257744e2011-11-30 15:57:28 +00004260 __ call(ic, RelocInfo::CODE_TARGET, ast_id);
Steve Block1e0659c2011-05-24 12:43:12 +01004261 if (patch_site != NULL && patch_site->is_bound()) {
4262 patch_site->EmitPatchInfo();
4263 } else {
4264 __ nop(); // Signals no inlined code.
4265 }
4266}
4267
4268
Leon Clarked91b9f72010-01-27 17:25:45 +00004269void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
Leon Clarkee46be812010-01-19 14:06:41 +00004270 ASSERT(IsAligned(frame_offset, kPointerSize));
4271 __ movq(Operand(rbp, frame_offset), value);
4272}
4273
4274
Leon Clarked91b9f72010-01-27 17:25:45 +00004275void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
Steve Block59151502010-09-22 15:07:15 +01004276 __ movq(dst, ContextOperand(rsi, context_index));
Leon Clarkee46be812010-01-19 14:06:41 +00004277}
4278
4279
4280// ----------------------------------------------------------------------------
4281// Non-local control flow support.
4282
4283
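// Enter/ExitFinallyBlock cooperate around finally clauses: the return address
// on top of the stack is "cooked" into a smi-encoded delta from the code
// object while the finally body runs, and uncooked back into a raw address
// afterwards. Keeping it as a smi presumably keeps the slot safe if the code
// object moves in the meantime.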
Leon Clarked91b9f72010-01-27 17:25:45 +00004284void FullCodeGenerator::EnterFinallyBlock() {
Leon Clarkee46be812010-01-19 14:06:41 +00004285 ASSERT(!result_register().is(rdx));
4286 ASSERT(!result_register().is(rcx));
4287  // Cook return address on top of stack (smi-encoded Code* delta).
4288 __ movq(rdx, Operand(rsp, 0));
4289 __ Move(rcx, masm_->CodeObject());
4290 __ subq(rdx, rcx);
4291 __ Integer32ToSmi(rdx, rdx);
4292 __ movq(Operand(rsp, 0), rdx);
4293 // Store result register while executing finally block.
4294 __ push(result_register());
4295}
4296
4297
Leon Clarked91b9f72010-01-27 17:25:45 +00004298void FullCodeGenerator::ExitFinallyBlock() {
Leon Clarkee46be812010-01-19 14:06:41 +00004299 ASSERT(!result_register().is(rdx));
4300 ASSERT(!result_register().is(rcx));
4301 // Restore result register from stack.
4302 __ pop(result_register());
4303 // Uncook return address.
4304 __ movq(rdx, Operand(rsp, 0));
4305 __ SmiToInteger32(rdx, rdx);
4306 __ Move(rcx, masm_->CodeObject());
4307 __ addq(rdx, rcx);
4308 __ movq(Operand(rsp, 0), rdx);
4309 // And return.
4310 __ ret(0);
Steve Blockd0582a62009-12-15 09:54:21 +00004311}
4312
4313
4314#undef __
4315
4316
Steve Block3ce2e202009-11-05 08:53:23 +00004317} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01004318
4319#endif // V8_TARGET_ARCH_X64