blob: 39f543df560cd9cdac25d75a9d5fa19ffc948bed [file] [log] [blame]
Leon Clarked91b9f72010-01-27 17:25:45 +00001// Copyright 2010 the V8 project authors. All rights reserved.
Steve Blocka7e24c12009-10-30 11:49:00 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "bootstrapper.h"
31#include "codegen-inl.h"
Steve Blockd0582a62009-12-15 09:54:21 +000032#include "compiler.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000033#include "debug.h"
34#include "ic-inl.h"
35#include "parser.h"
Leon Clarke4515c472010-02-03 11:58:03 +000036#include "regexp-macro-assembler.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000037#include "register-allocator-inl.h"
38#include "scopes.h"
Steve Block6ded16b2010-05-10 14:33:55 +010039#include "virtual-frame-inl.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000040
41namespace v8 {
42namespace internal {
43
44#define __ ACCESS_MASM(masm_)
45
46// -------------------------------------------------------------------------
47// Platform-specific DeferredCode functions.
48
49void DeferredCode::SaveRegisters() {
50 for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
51 int action = registers_[i];
52 if (action == kPush) {
53 __ push(RegisterAllocator::ToRegister(i));
54 } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
55 __ movq(Operand(rbp, action), RegisterAllocator::ToRegister(i));
56 }
57 }
58}
59
Steve Block3ce2e202009-11-05 08:53:23 +000060
Steve Blocka7e24c12009-10-30 11:49:00 +000061void DeferredCode::RestoreRegisters() {
62 // Restore registers in reverse order due to the stack.
63 for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
64 int action = registers_[i];
65 if (action == kPush) {
66 __ pop(RegisterAllocator::ToRegister(i));
67 } else if (action != kIgnore) {
68 action &= ~kSyncedFlag;
69 __ movq(RegisterAllocator::ToRegister(i), Operand(rbp, action));
70 }
71 }
72}
73
74
75// -------------------------------------------------------------------------
76// CodeGenState implementation.
77
78CodeGenState::CodeGenState(CodeGenerator* owner)
79 : owner_(owner),
Steve Blocka7e24c12009-10-30 11:49:00 +000080 destination_(NULL),
81 previous_(NULL) {
82 owner_->set_state(this);
83}
84
85
86CodeGenState::CodeGenState(CodeGenerator* owner,
Steve Blocka7e24c12009-10-30 11:49:00 +000087 ControlDestination* destination)
88 : owner_(owner),
Steve Blocka7e24c12009-10-30 11:49:00 +000089 destination_(destination),
90 previous_(owner->state()) {
91 owner_->set_state(this);
92}
93
94
95CodeGenState::~CodeGenState() {
96 ASSERT(owner_->state() == this);
97 owner_->set_state(previous_);
98}
99
100
101// -------------------------------------------------------------------------
102// Deferred code objects
103//
104// These subclasses of DeferredCode add pieces of code to the end of generated
105// code. They are branched to from the generated code, and
106// keep some slower code out of the main body of the generated code.
107// Many of them call a code stub or a runtime function.
108
109class DeferredInlineSmiAdd: public DeferredCode {
110 public:
111 DeferredInlineSmiAdd(Register dst,
112 Smi* value,
113 OverwriteMode overwrite_mode)
114 : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
115 set_comment("[ DeferredInlineSmiAdd");
116 }
117
118 virtual void Generate();
119
120 private:
121 Register dst_;
122 Smi* value_;
123 OverwriteMode overwrite_mode_;
124};
125
126
127// The result of value + src is in dst. It either overflowed or was not
128// smi tagged. Undo the speculative addition and call the appropriate
129// specialized stub for add. The result is left in dst.
130class DeferredInlineSmiAddReversed: public DeferredCode {
131 public:
132 DeferredInlineSmiAddReversed(Register dst,
133 Smi* value,
134 OverwriteMode overwrite_mode)
135 : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
136 set_comment("[ DeferredInlineSmiAddReversed");
137 }
138
139 virtual void Generate();
140
141 private:
142 Register dst_;
143 Smi* value_;
144 OverwriteMode overwrite_mode_;
145};
146
147
148class DeferredInlineSmiSub: public DeferredCode {
149 public:
150 DeferredInlineSmiSub(Register dst,
151 Smi* value,
152 OverwriteMode overwrite_mode)
153 : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
154 set_comment("[ DeferredInlineSmiSub");
155 }
156
157 virtual void Generate();
158
159 private:
160 Register dst_;
161 Smi* value_;
162 OverwriteMode overwrite_mode_;
163};
164
165
166// Call the appropriate binary operation stub to compute src op value
167// and leave the result in dst.
168class DeferredInlineSmiOperation: public DeferredCode {
169 public:
170 DeferredInlineSmiOperation(Token::Value op,
171 Register dst,
172 Register src,
173 Smi* value,
174 OverwriteMode overwrite_mode)
175 : op_(op),
176 dst_(dst),
177 src_(src),
178 value_(value),
179 overwrite_mode_(overwrite_mode) {
180 set_comment("[ DeferredInlineSmiOperation");
181 }
182
183 virtual void Generate();
184
185 private:
186 Token::Value op_;
187 Register dst_;
188 Register src_;
189 Smi* value_;
190 OverwriteMode overwrite_mode_;
191};
192
193
Steve Block6ded16b2010-05-10 14:33:55 +0100194// Call the appropriate binary operation stub to compute value op src
195// and leave the result in dst.
196class DeferredInlineSmiOperationReversed: public DeferredCode {
197 public:
198 DeferredInlineSmiOperationReversed(Token::Value op,
199 Register dst,
200 Smi* value,
201 Register src,
202 OverwriteMode overwrite_mode)
203 : op_(op),
204 dst_(dst),
205 value_(value),
206 src_(src),
207 overwrite_mode_(overwrite_mode) {
208 set_comment("[ DeferredInlineSmiOperationReversed");
209 }
210
211 virtual void Generate();
212
213 private:
214 Token::Value op_;
215 Register dst_;
216 Smi* value_;
217 Register src_;
218 OverwriteMode overwrite_mode_;
219};
220
221
Steve Blocka7e24c12009-10-30 11:49:00 +0000222class FloatingPointHelper : public AllStatic {
223 public:
224 // Code pattern for loading a floating point value. Input value must
225 // be either a smi or a heap number object (fp value). Requirements:
226 // operand on TOS+1. Returns operand as floating point number on FPU
227 // stack.
228 static void LoadFloatOperand(MacroAssembler* masm, Register scratch);
229
230 // Code pattern for loading a floating point value. Input value must
231 // be either a smi or a heap number object (fp value). Requirements:
232 // operand in src register. Returns operand as floating point number
Steve Block6ded16b2010-05-10 14:33:55 +0100233 // in XMM register. May destroy src register.
Steve Blocka7e24c12009-10-30 11:49:00 +0000234 static void LoadFloatOperand(MacroAssembler* masm,
235 Register src,
236 XMMRegister dst);
237
Steve Block6ded16b2010-05-10 14:33:55 +0100238 // Code pattern for loading a possible number into a XMM register.
239 // If the contents of src is not a number, control branches to
240 // the Label not_number. If contents of src is a smi or a heap number
241 // object (fp value), it is loaded into the XMM register as a double.
242 // The register src is not changed, and src may not be kScratchRegister.
243 static void LoadFloatOperand(MacroAssembler* masm,
244 Register src,
245 XMMRegister dst,
246 Label *not_number);
247
Steve Blocka7e24c12009-10-30 11:49:00 +0000248 // Code pattern for loading floating point values. Input values must
249 // be either smi or heap number objects (fp values). Requirements:
Leon Clarke4515c472010-02-03 11:58:03 +0000250 // operand_1 in rdx, operand_2 in rax; Returns operands as
Steve Blocka7e24c12009-10-30 11:49:00 +0000251 // floating point numbers in XMM registers.
252 static void LoadFloatOperands(MacroAssembler* masm,
253 XMMRegister dst1,
254 XMMRegister dst2);
255
Leon Clarke4515c472010-02-03 11:58:03 +0000256 // Similar to LoadFloatOperands, assumes that the operands are smis.
257 static void LoadFloatOperandsFromSmis(MacroAssembler* masm,
258 XMMRegister dst1,
259 XMMRegister dst2);
260
Steve Blocka7e24c12009-10-30 11:49:00 +0000261 // Code pattern for loading floating point values onto the fp stack.
262 // Input values must be either smi or heap number objects (fp values).
263 // Requirements:
264 // Register version: operands in registers lhs and rhs.
265 // Stack version: operands on TOS+1 and TOS+2.
266 // Returns operands as floating point numbers on fp stack.
Steve Blocka7e24c12009-10-30 11:49:00 +0000267 static void LoadFloatOperands(MacroAssembler* masm,
268 Register lhs,
269 Register rhs);
270
Steve Blocka7e24c12009-10-30 11:49:00 +0000271 // Test if operands are smi or number objects (fp). Requirements:
272 // operand_1 in rax, operand_2 in rdx; falls through on float or smi
273 // operands, jumps to the non_float label otherwise.
Steve Block3ce2e202009-11-05 08:53:23 +0000274 static void CheckNumberOperands(MacroAssembler* masm,
275 Label* non_float);
Leon Clarked91b9f72010-01-27 17:25:45 +0000276
277 // Takes the operands in rdx and rax and loads them as integers in rax
278 // and rcx.
279 static void LoadAsIntegers(MacroAssembler* masm,
280 bool use_sse3,
281 Label* operand_conversion_failure);
Steve Blocka7e24c12009-10-30 11:49:00 +0000282};
283
284
285// -----------------------------------------------------------------------------
286// CodeGenerator implementation.
287
Andrei Popescu31002712010-02-23 13:46:05 +0000288CodeGenerator::CodeGenerator(MacroAssembler* masm)
289 : deferred_(8),
Leon Clarke4515c472010-02-03 11:58:03 +0000290 masm_(masm),
Andrei Popescu31002712010-02-23 13:46:05 +0000291 info_(NULL),
Steve Blocka7e24c12009-10-30 11:49:00 +0000292 frame_(NULL),
293 allocator_(NULL),
294 state_(NULL),
295 loop_nesting_(0),
296 function_return_is_shadowed_(false),
297 in_spilled_code_(false) {
298}
299
300
301void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
302 // Call the runtime to declare the globals. The inevitable call
303 // will sync frame elements to memory anyway, so we do it eagerly to
304 // allow us to push the arguments directly into place.
305 frame_->SyncRange(0, frame_->element_count() - 1);
306
307 __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT);
Steve Block3ce2e202009-11-05 08:53:23 +0000308 frame_->EmitPush(rsi); // The context is the first argument.
Steve Blocka7e24c12009-10-30 11:49:00 +0000309 frame_->EmitPush(kScratchRegister);
Steve Block3ce2e202009-11-05 08:53:23 +0000310 frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0));
Steve Blocka7e24c12009-10-30 11:49:00 +0000311 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
312 // Return value is ignored.
313}
314
315
Andrei Popescu402d9372010-02-26 13:31:12 +0000316void CodeGenerator::Generate(CompilationInfo* info) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000317 // Record the position for debugging purposes.
Andrei Popescu31002712010-02-23 13:46:05 +0000318 CodeForFunctionPosition(info->function());
Steve Block6ded16b2010-05-10 14:33:55 +0100319 Comment cmnt(masm_, "[ function compiled by virtual frame code generator");
Steve Blocka7e24c12009-10-30 11:49:00 +0000320
321 // Initialize state.
Andrei Popescu31002712010-02-23 13:46:05 +0000322 info_ = info;
Steve Blocka7e24c12009-10-30 11:49:00 +0000323 ASSERT(allocator_ == NULL);
324 RegisterAllocator register_allocator(this);
325 allocator_ = &register_allocator;
326 ASSERT(frame_ == NULL);
327 frame_ = new VirtualFrame();
328 set_in_spilled_code(false);
329
330 // Adjust for function-level loop nesting.
Steve Block6ded16b2010-05-10 14:33:55 +0100331 ASSERT_EQ(0, loop_nesting_);
Leon Clarke4515c472010-02-03 11:58:03 +0000332 loop_nesting_ += info->loop_nesting();
Steve Blocka7e24c12009-10-30 11:49:00 +0000333
334 JumpTarget::set_compiling_deferred_code(false);
335
336#ifdef DEBUG
337 if (strlen(FLAG_stop_at) > 0 &&
Andrei Popescu31002712010-02-23 13:46:05 +0000338 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000339 frame_->SpillAll();
340 __ int3();
341 }
342#endif
343
344 // New scope to get automatic timing calculation.
Steve Block6ded16b2010-05-10 14:33:55 +0100345 { HistogramTimerScope codegen_timer(&Counters::code_generation);
Steve Blocka7e24c12009-10-30 11:49:00 +0000346 CodeGenState state(this);
347
348 // Entry:
349 // Stack: receiver, arguments, return address.
350 // rbp: caller's frame pointer
351 // rsp: stack pointer
352 // rdi: called JS function
353 // rsi: callee's context
354 allocator_->Initialize();
Steve Blocka7e24c12009-10-30 11:49:00 +0000355
Andrei Popescu402d9372010-02-26 13:31:12 +0000356 if (info->mode() == CompilationInfo::PRIMARY) {
Leon Clarke4515c472010-02-03 11:58:03 +0000357 frame_->Enter();
358
359 // Allocate space for locals and initialize them.
360 frame_->AllocateStackSlots();
361
362 // Allocate the local context if needed.
Andrei Popescu31002712010-02-23 13:46:05 +0000363 int heap_slots = scope()->num_heap_slots();
Leon Clarke4515c472010-02-03 11:58:03 +0000364 if (heap_slots > 0) {
365 Comment cmnt(masm_, "[ allocate local context");
366 // Allocate local context.
367 // Get outer context and create a new context based on it.
368 frame_->PushFunction();
369 Result context;
370 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
371 FastNewContextStub stub(heap_slots);
372 context = frame_->CallStub(&stub, 1);
373 } else {
374 context = frame_->CallRuntime(Runtime::kNewContext, 1);
375 }
376
377 // Update context local.
378 frame_->SaveContextRegister();
379
380 // Verify that the runtime call result and rsi agree.
381 if (FLAG_debug_code) {
382 __ cmpq(context.reg(), rsi);
383 __ Assert(equal, "Runtime::NewContext should end up in rsi");
384 }
385 }
386
387 // TODO(1241774): Improve this code:
388 // 1) only needed if we have a context
389 // 2) no need to recompute context ptr every single time
390 // 3) don't copy parameter operand code from SlotOperand!
391 {
392 Comment cmnt2(masm_, "[ copy context parameters into .context");
Leon Clarke4515c472010-02-03 11:58:03 +0000393 // Note that iteration order is relevant here! If we have the same
394 // parameter twice (e.g., function (x, y, x)), and that parameter
395 // needs to be copied into the context, it must be the last argument
396 // passed to the parameter that needs to be copied. This is a rare
397 // case so we don't check for it, instead we rely on the copying
398 // order: such a parameter is copied repeatedly into the same
399 // context location and thus the last value is what is seen inside
400 // the function.
Andrei Popescu31002712010-02-23 13:46:05 +0000401 for (int i = 0; i < scope()->num_parameters(); i++) {
402 Variable* par = scope()->parameter(i);
Leon Clarke4515c472010-02-03 11:58:03 +0000403 Slot* slot = par->slot();
404 if (slot != NULL && slot->type() == Slot::CONTEXT) {
405 // The use of SlotOperand below is safe in unspilled code
406 // because the slot is guaranteed to be a context slot.
407 //
408 // There are no parameters in the global scope.
Andrei Popescu31002712010-02-23 13:46:05 +0000409 ASSERT(!scope()->is_global_scope());
Leon Clarke4515c472010-02-03 11:58:03 +0000410 frame_->PushParameterAt(i);
411 Result value = frame_->Pop();
412 value.ToRegister();
413
414 // SlotOperand loads context.reg() with the context object
415 // stored to, used below in RecordWrite.
416 Result context = allocator_->Allocate();
417 ASSERT(context.is_valid());
418 __ movq(SlotOperand(slot, context.reg()), value.reg());
419 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
420 Result scratch = allocator_->Allocate();
421 ASSERT(scratch.is_valid());
422 frame_->Spill(context.reg());
423 frame_->Spill(value.reg());
424 __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
425 }
426 }
427 }
428
429 // Store the arguments object. This must happen after context
430 // initialization because the arguments object may be stored in
431 // the context.
432 if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
433 StoreArgumentsObject(true);
434 }
435
436 // Initialize ThisFunction reference if present.
Andrei Popescu31002712010-02-23 13:46:05 +0000437 if (scope()->is_function_scope() && scope()->function() != NULL) {
Leon Clarke4515c472010-02-03 11:58:03 +0000438 frame_->Push(Factory::the_hole_value());
Andrei Popescu31002712010-02-23 13:46:05 +0000439 StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
Leon Clarke4515c472010-02-03 11:58:03 +0000440 }
441 } else {
442 // When used as the secondary compiler for splitting, rbp, rsi,
443 // and rdi have been pushed on the stack. Adjust the virtual
444 // frame to match this state.
445 frame_->Adjust(3);
446 allocator_->Unuse(rdi);
Andrei Popescu402d9372010-02-26 13:31:12 +0000447
448 // Bind all the bailout labels to the beginning of the function.
449 List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
450 for (int i = 0; i < bailouts->length(); i++) {
451 __ bind(bailouts->at(i)->label());
452 }
Leon Clarke4515c472010-02-03 11:58:03 +0000453 }
454
Steve Blocka7e24c12009-10-30 11:49:00 +0000455 // Initialize the function return target after the locals are set
456 // up, because it needs the expected frame height from the frame.
457 function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
458 function_return_is_shadowed_ = false;
459
Steve Blocka7e24c12009-10-30 11:49:00 +0000460 // Generate code to 'execute' declarations and initialize functions
461 // (source elements). In case of an illegal redeclaration we need to
462 // handle that instead of processing the declarations.
Andrei Popescu31002712010-02-23 13:46:05 +0000463 if (scope()->HasIllegalRedeclaration()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000464 Comment cmnt(masm_, "[ illegal redeclarations");
Andrei Popescu31002712010-02-23 13:46:05 +0000465 scope()->VisitIllegalRedeclaration(this);
Steve Blocka7e24c12009-10-30 11:49:00 +0000466 } else {
467 Comment cmnt(masm_, "[ declarations");
Andrei Popescu31002712010-02-23 13:46:05 +0000468 ProcessDeclarations(scope()->declarations());
Steve Blocka7e24c12009-10-30 11:49:00 +0000469 // Bail out if a stack-overflow exception occurred when processing
470 // declarations.
471 if (HasStackOverflow()) return;
472 }
473
474 if (FLAG_trace) {
475 frame_->CallRuntime(Runtime::kTraceEnter, 0);
476 // Ignore the return value.
477 }
478 CheckStack();
479
480 // Compile the body of the function in a vanilla state. Don't
481 // bother compiling all the code if the scope has an illegal
482 // redeclaration.
Andrei Popescu31002712010-02-23 13:46:05 +0000483 if (!scope()->HasIllegalRedeclaration()) {
Steve Blocka7e24c12009-10-30 11:49:00 +0000484 Comment cmnt(masm_, "[ function body");
485#ifdef DEBUG
486 bool is_builtin = Bootstrapper::IsActive();
487 bool should_trace =
488 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
489 if (should_trace) {
490 frame_->CallRuntime(Runtime::kDebugTrace, 0);
491 // Ignore the return value.
492 }
493#endif
Andrei Popescu31002712010-02-23 13:46:05 +0000494 VisitStatements(info->function()->body());
Steve Blocka7e24c12009-10-30 11:49:00 +0000495
496 // Handle the return from the function.
497 if (has_valid_frame()) {
498 // If there is a valid frame, control flow can fall off the end of
499 // the body. In that case there is an implicit return statement.
500 ASSERT(!function_return_is_shadowed_);
Andrei Popescu31002712010-02-23 13:46:05 +0000501 CodeForReturnPosition(info->function());
Steve Blocka7e24c12009-10-30 11:49:00 +0000502 frame_->PrepareForReturn();
503 Result undefined(Factory::undefined_value());
504 if (function_return_.is_bound()) {
505 function_return_.Jump(&undefined);
506 } else {
507 function_return_.Bind(&undefined);
508 GenerateReturnSequence(&undefined);
509 }
510 } else if (function_return_.is_linked()) {
511 // If the return target has dangling jumps to it, then we have not
512 // yet generated the return sequence. This can happen when (a)
513 // control does not flow off the end of the body so we did not
514 // compile an artificial return statement just above, and (b) there
515 // are return statements in the body but (c) they are all shadowed.
516 Result return_value;
517 function_return_.Bind(&return_value);
518 GenerateReturnSequence(&return_value);
519 }
520 }
521 }
522
523 // Adjust for function-level loop nesting.
Steve Block6ded16b2010-05-10 14:33:55 +0100524 ASSERT_EQ(loop_nesting_, info->loop_nesting());
525 loop_nesting_ = 0;
Steve Blocka7e24c12009-10-30 11:49:00 +0000526
527 // Code generation state must be reset.
528 ASSERT(state_ == NULL);
Steve Blocka7e24c12009-10-30 11:49:00 +0000529 ASSERT(!function_return_is_shadowed_);
530 function_return_.Unuse();
531 DeleteFrame();
532
533 // Process any deferred code using the register allocator.
534 if (!HasStackOverflow()) {
535 HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
536 JumpTarget::set_compiling_deferred_code(true);
537 ProcessDeferred();
538 JumpTarget::set_compiling_deferred_code(false);
539 }
540
541 // There is no need to delete the register allocator, it is a
542 // stack-allocated local.
543 allocator_ = NULL;
Steve Blocka7e24c12009-10-30 11:49:00 +0000544}
545
546void CodeGenerator::GenerateReturnSequence(Result* return_value) {
547 // The return value is a live (but not currently reference counted)
548 // reference to rax. This is safe because the current frame does not
549 // contain a reference to rax (it is prepared for the return by spilling
550 // all registers).
551 if (FLAG_trace) {
552 frame_->Push(return_value);
553 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
554 }
555 return_value->ToRegister(rax);
556
557 // Add a label for checking the size of the code used for returning.
558#ifdef DEBUG
559 Label check_exit_codesize;
560 masm_->bind(&check_exit_codesize);
561#endif
562
563 // Leave the frame and return popping the arguments and the
564 // receiver.
565 frame_->Exit();
Andrei Popescu31002712010-02-23 13:46:05 +0000566 masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +0000567#ifdef ENABLE_DEBUGGER_SUPPORT
568 // Add padding that will be overwritten by a debugger breakpoint.
569 // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k"
570 // with length 7 (3 + 1 + 3).
Steve Blockd0582a62009-12-15 09:54:21 +0000571 const int kPadding = Assembler::kJSReturnSequenceLength - 7;
Steve Blocka7e24c12009-10-30 11:49:00 +0000572 for (int i = 0; i < kPadding; ++i) {
573 masm_->int3();
574 }
575 // Check that the size of the code used for returning matches what is
576 // expected by the debugger.
Steve Blockd0582a62009-12-15 09:54:21 +0000577 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
Steve Blocka7e24c12009-10-30 11:49:00 +0000578 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
579#endif
580 DeleteFrame();
581}
582
583
584#ifdef DEBUG
585bool CodeGenerator::HasValidEntryRegisters() {
586 return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0))
587 && (allocator()->count(rbx) == (frame()->is_used(rbx) ? 1 : 0))
588 && (allocator()->count(rcx) == (frame()->is_used(rcx) ? 1 : 0))
589 && (allocator()->count(rdx) == (frame()->is_used(rdx) ? 1 : 0))
590 && (allocator()->count(rdi) == (frame()->is_used(rdi) ? 1 : 0))
591 && (allocator()->count(r8) == (frame()->is_used(r8) ? 1 : 0))
592 && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0))
593 && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0))
594 && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0))
595 && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0))
596 && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0));
597}
598#endif
599
600
601class DeferredReferenceGetKeyedValue: public DeferredCode {
602 public:
603 explicit DeferredReferenceGetKeyedValue(Register dst,
604 Register receiver,
605 Register key,
606 bool is_global)
607 : dst_(dst), receiver_(receiver), key_(key), is_global_(is_global) {
608 set_comment("[ DeferredReferenceGetKeyedValue");
609 }
610
611 virtual void Generate();
612
613 Label* patch_site() { return &patch_site_; }
614
615 private:
616 Label patch_site_;
617 Register dst_;
618 Register receiver_;
619 Register key_;
620 bool is_global_;
621};
622
623
624void DeferredReferenceGetKeyedValue::Generate() {
625 __ push(receiver_); // First IC argument.
626 __ push(key_); // Second IC argument.
627
628 // Calculate the delta from the IC call instruction to the map check
629 // movq instruction in the inlined version. This delta is stored in
630 // a test(rax, delta) instruction after the call so that we can find
631 // it in the IC initialization code and patch the movq instruction.
632 // This means that we cannot allow test instructions after calls to
633 // KeyedLoadIC stubs in other places.
634 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
635 RelocInfo::Mode mode = is_global_
636 ? RelocInfo::CODE_TARGET_CONTEXT
637 : RelocInfo::CODE_TARGET;
638 __ Call(ic, mode);
639 // The delta from the start of the map-compare instruction to the
640 // test instruction. We use masm_-> directly here instead of the __
641 // macro because the macro sometimes uses macro expansion to turn
642 // into something that can't return a value. This is encountered
643 // when doing generated code coverage tests.
644 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
645 // Here we use masm_-> instead of the __ macro because this is the
646 // instruction that gets patched and coverage code gets in the way.
647 // TODO(X64): Consider whether it's worth switching the test to a
648 // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
649 // be generated normally.
650 masm_->testl(rax, Immediate(-delta_to_patch_site));
651 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
652
653 if (!dst_.is(rax)) __ movq(dst_, rax);
654 __ pop(key_);
655 __ pop(receiver_);
656}
657
658
659class DeferredReferenceSetKeyedValue: public DeferredCode {
660 public:
661 DeferredReferenceSetKeyedValue(Register value,
662 Register key,
663 Register receiver)
664 : value_(value), key_(key), receiver_(receiver) {
665 set_comment("[ DeferredReferenceSetKeyedValue");
666 }
667
668 virtual void Generate();
669
670 Label* patch_site() { return &patch_site_; }
671
672 private:
673 Register value_;
674 Register key_;
675 Register receiver_;
676 Label patch_site_;
677};
678
679
680void DeferredReferenceSetKeyedValue::Generate() {
681 __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
682 // Push receiver and key arguments on the stack.
683 __ push(receiver_);
684 __ push(key_);
685 // Move value argument to eax as expected by the IC stub.
686 if (!value_.is(rax)) __ movq(rax, value_);
687 // Call the IC stub.
688 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
689 __ Call(ic, RelocInfo::CODE_TARGET);
690 // The delta from the start of the map-compare instructions (initial movq)
691 // to the test instruction. We use masm_-> directly here instead of the
692 // __ macro because the macro sometimes uses macro expansion to turn
693 // into something that can't return a value. This is encountered
694 // when doing generated code coverage tests.
695 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
696 // Here we use masm_-> instead of the __ macro because this is the
697 // instruction that gets patched and coverage code gets in the way.
698 masm_->testl(rax, Immediate(-delta_to_patch_site));
699 // Restore value (returned from store IC), key and receiver
700 // registers.
701 if (!value_.is(rax)) __ movq(value_, rax);
702 __ pop(key_);
703 __ pop(receiver_);
704}
705
706
Leon Clarked91b9f72010-01-27 17:25:45 +0000707void CodeGenerator::CallApplyLazy(Expression* applicand,
Steve Blocka7e24c12009-10-30 11:49:00 +0000708 Expression* receiver,
709 VariableProxy* arguments,
710 int position) {
Leon Clarked91b9f72010-01-27 17:25:45 +0000711 // An optimized implementation of expressions of the form
712 // x.apply(y, arguments).
713 // If the arguments object of the scope has not been allocated,
714 // and x.apply is Function.prototype.apply, this optimization
715 // just copies y and the arguments of the current function on the
716 // stack, as receiver and arguments, and calls x.
717 // In the implementation comments, we call x the applicand
718 // and y the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +0000719 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
720 ASSERT(arguments->IsArguments());
721
Leon Clarked91b9f72010-01-27 17:25:45 +0000722 // Load applicand.apply onto the stack. This will usually
Steve Blocka7e24c12009-10-30 11:49:00 +0000723 // give us a megamorphic load site. Not super, but it works.
Leon Clarked91b9f72010-01-27 17:25:45 +0000724 Load(applicand);
725 Handle<String> name = Factory::LookupAsciiSymbol("apply");
726 frame()->Push(name);
727 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
728 __ nop();
729 frame()->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +0000730
731 // Load the receiver and the existing arguments object onto the
732 // expression stack. Avoid allocating the arguments object here.
733 Load(receiver);
Andrei Popescu31002712010-02-23 13:46:05 +0000734 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +0000735
736 // Emit the source position information after having loaded the
737 // receiver and the arguments.
738 CodeForSourcePosition(position);
Leon Clarked91b9f72010-01-27 17:25:45 +0000739 // Contents of frame at this point:
740 // Frame[0]: arguments object of the current function or the hole.
741 // Frame[1]: receiver
742 // Frame[2]: applicand.apply
743 // Frame[3]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +0000744
745 // Check if the arguments object has been lazily allocated
746 // already. If so, just use that instead of copying the arguments
747 // from the stack. This also deals with cases where a local variable
748 // named 'arguments' has been introduced.
749 frame_->Dup();
750 Result probe = frame_->Pop();
Leon Clarked91b9f72010-01-27 17:25:45 +0000751 { VirtualFrame::SpilledScope spilled_scope;
752 Label slow, done;
753 bool try_lazy = true;
754 if (probe.is_constant()) {
755 try_lazy = probe.handle()->IsTheHole();
756 } else {
757 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
758 probe.Unuse();
759 __ j(not_equal, &slow);
760 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000761
Leon Clarked91b9f72010-01-27 17:25:45 +0000762 if (try_lazy) {
763 Label build_args;
764 // Get rid of the arguments object probe.
765 frame_->Drop(); // Can be called on a spilled frame.
766 // Stack now has 3 elements on it.
767 // Contents of stack at this point:
768 // rsp[0]: receiver
769 // rsp[1]: applicand.apply
770 // rsp[2]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +0000771
Leon Clarked91b9f72010-01-27 17:25:45 +0000772 // Check that the receiver really is a JavaScript object.
773 __ movq(rax, Operand(rsp, 0));
774 Condition is_smi = masm_->CheckSmi(rax);
775 __ j(is_smi, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +0000776 // We allow all JSObjects including JSFunctions. As long as
777 // JS_FUNCTION_TYPE is the last instance type and it is right
778 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
779 // bound.
780 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
781 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Leon Clarked91b9f72010-01-27 17:25:45 +0000782 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
783 __ j(below, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +0000784
Leon Clarked91b9f72010-01-27 17:25:45 +0000785 // Check that applicand.apply is Function.prototype.apply.
786 __ movq(rax, Operand(rsp, kPointerSize));
787 is_smi = masm_->CheckSmi(rax);
788 __ j(is_smi, &build_args);
789 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx);
790 __ j(not_equal, &build_args);
791 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
Leon Clarkeeab96aa2010-01-27 16:31:12 +0000792 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Leon Clarked91b9f72010-01-27 17:25:45 +0000793 __ Cmp(FieldOperand(rax, SharedFunctionInfo::kCodeOffset), apply_code);
794 __ j(not_equal, &build_args);
795
796 // Check that applicand is a function.
797 __ movq(rdi, Operand(rsp, 2 * kPointerSize));
798 is_smi = masm_->CheckSmi(rdi);
799 __ j(is_smi, &build_args);
800 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
801 __ j(not_equal, &build_args);
802
803 // Copy the arguments to this function possibly from the
804 // adaptor frame below it.
805 Label invoke, adapted;
806 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
807 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
808 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
809 __ j(equal, &adapted);
810
811 // No arguments adaptor frame. Copy fixed number of arguments.
Andrei Popescu31002712010-02-23 13:46:05 +0000812 __ movq(rax, Immediate(scope()->num_parameters()));
813 for (int i = 0; i < scope()->num_parameters(); i++) {
Leon Clarked91b9f72010-01-27 17:25:45 +0000814 __ push(frame_->ParameterAt(i));
815 }
816 __ jmp(&invoke);
817
818 // Arguments adaptor frame present. Copy arguments from there, but
819 // avoid copying too many arguments to avoid stack overflows.
820 __ bind(&adapted);
821 static const uint32_t kArgumentsLimit = 1 * KB;
822 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
823 __ SmiToInteger32(rax, rax);
824 __ movq(rcx, rax);
825 __ cmpq(rax, Immediate(kArgumentsLimit));
826 __ j(above, &build_args);
827
828 // Loop through the arguments pushing them onto the execution
829 // stack. We don't inform the virtual frame of the push, so we don't
830 // have to worry about getting rid of the elements from the virtual
831 // frame.
832 Label loop;
833 // rcx is a small non-negative integer, due to the test above.
834 __ testl(rcx, rcx);
835 __ j(zero, &invoke);
836 __ bind(&loop);
837 __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize));
838 __ decl(rcx);
839 __ j(not_zero, &loop);
840
841 // Invoke the function.
842 __ bind(&invoke);
843 ParameterCount actual(rax);
844 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
845 // Drop applicand.apply and applicand from the stack, and push
846 // the result of the function call, but leave the spilled frame
847 // unchanged, with 3 elements, so it is correct when we compile the
848 // slow-case code.
849 __ addq(rsp, Immediate(2 * kPointerSize));
850 __ push(rax);
851 // Stack now has 1 element:
852 // rsp[0]: result
853 __ jmp(&done);
854
855 // Slow-case: Allocate the arguments object since we know it isn't
856 // there, and fall-through to the slow-case where we call
857 // applicand.apply.
858 __ bind(&build_args);
859 // Stack now has 3 elements, because we have jumped from where:
860 // rsp[0]: receiver
861 // rsp[1]: applicand.apply
862 // rsp[2]: applicand.
863
864 // StoreArgumentsObject requires a correct frame, and may modify it.
865 Result arguments_object = StoreArgumentsObject(false);
866 frame_->SpillAll();
867 arguments_object.ToRegister();
868 frame_->EmitPush(arguments_object.reg());
869 arguments_object.Unuse();
870 // Stack and frame now have 4 elements.
871 __ bind(&slow);
Leon Clarkeeab96aa2010-01-27 16:31:12 +0000872 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000873
Leon Clarked91b9f72010-01-27 17:25:45 +0000874 // Generic computation of x.apply(y, args) with no special optimization.
875 // Flip applicand.apply and applicand on the stack, so
876 // applicand looks like the receiver of the applicand.apply call.
877 // Then process it as a normal function call.
878 __ movq(rax, Operand(rsp, 3 * kPointerSize));
879 __ movq(rbx, Operand(rsp, 2 * kPointerSize));
880 __ movq(Operand(rsp, 2 * kPointerSize), rax);
881 __ movq(Operand(rsp, 3 * kPointerSize), rbx);
Leon Clarkeeab96aa2010-01-27 16:31:12 +0000882
Leon Clarked91b9f72010-01-27 17:25:45 +0000883 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
884 Result res = frame_->CallStub(&call_function, 3);
885 // The function and its two arguments have been dropped.
886 frame_->Drop(1); // Drop the receiver as well.
887 res.ToRegister();
888 frame_->EmitPush(res.reg());
889 // Stack now has 1 element:
890 // rsp[0]: result
891 if (try_lazy) __ bind(&done);
892 } // End of spilled scope.
893 // Restore the context register after a call.
Steve Blocka7e24c12009-10-30 11:49:00 +0000894 frame_->RestoreContextRegister();
895}
896
897
898class DeferredStackCheck: public DeferredCode {
899 public:
900 DeferredStackCheck() {
901 set_comment("[ DeferredStackCheck");
902 }
903
904 virtual void Generate();
905};
906
907
908void DeferredStackCheck::Generate() {
909 StackCheckStub stub;
910 __ CallStub(&stub);
911}
912
913
914void CodeGenerator::CheckStack() {
Steve Blockd0582a62009-12-15 09:54:21 +0000915 DeferredStackCheck* deferred = new DeferredStackCheck;
916 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
917 deferred->Branch(below);
918 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +0000919}
920
921
922void CodeGenerator::VisitAndSpill(Statement* statement) {
923 // TODO(X64): No architecture specific code. Move to shared location.
924 ASSERT(in_spilled_code());
925 set_in_spilled_code(false);
926 Visit(statement);
927 if (frame_ != NULL) {
928 frame_->SpillAll();
929 }
930 set_in_spilled_code(true);
931}
932
933
934void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
935 ASSERT(in_spilled_code());
936 set_in_spilled_code(false);
937 VisitStatements(statements);
938 if (frame_ != NULL) {
939 frame_->SpillAll();
940 }
941 set_in_spilled_code(true);
942}
943
944
945void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
946 ASSERT(!in_spilled_code());
947 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
948 Visit(statements->at(i));
949 }
950}
951
952
953void CodeGenerator::VisitBlock(Block* node) {
954 ASSERT(!in_spilled_code());
955 Comment cmnt(masm_, "[ Block");
956 CodeForStatementPosition(node);
957 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
958 VisitStatements(node->statements());
959 if (node->break_target()->is_linked()) {
960 node->break_target()->Bind();
961 }
962 node->break_target()->Unuse();
963}
964
965
966void CodeGenerator::VisitDeclaration(Declaration* node) {
967 Comment cmnt(masm_, "[ Declaration");
968 Variable* var = node->proxy()->var();
969 ASSERT(var != NULL); // must have been resolved
970 Slot* slot = var->slot();
971
972 // If it was not possible to allocate the variable at compile time,
973 // we need to "declare" it at runtime to make sure it actually
974 // exists in the local context.
975 if (slot != NULL && slot->type() == Slot::LOOKUP) {
976 // Variables with a "LOOKUP" slot were introduced as non-locals
977 // during variable resolution and must have mode DYNAMIC.
978 ASSERT(var->is_dynamic());
979 // For now, just do a runtime call. Sync the virtual frame eagerly
980 // so we can simply push the arguments into place.
981 frame_->SyncRange(0, frame_->element_count() - 1);
982 frame_->EmitPush(rsi);
983 __ movq(kScratchRegister, var->name(), RelocInfo::EMBEDDED_OBJECT);
984 frame_->EmitPush(kScratchRegister);
985 // Declaration nodes are always introduced in one of two modes.
986 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
987 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
Steve Block3ce2e202009-11-05 08:53:23 +0000988 frame_->EmitPush(Smi::FromInt(attr));
Steve Blocka7e24c12009-10-30 11:49:00 +0000989 // Push initial value, if any.
990 // Note: For variables we must not push an initial value (such as
991 // 'undefined') because we may have a (legal) redeclaration and we
992 // must not destroy the current value.
993 if (node->mode() == Variable::CONST) {
994 frame_->EmitPush(Heap::kTheHoleValueRootIndex);
995 } else if (node->fun() != NULL) {
996 Load(node->fun());
997 } else {
Steve Block3ce2e202009-11-05 08:53:23 +0000998 frame_->EmitPush(Smi::FromInt(0)); // no initial value!
Steve Blocka7e24c12009-10-30 11:49:00 +0000999 }
1000 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
1001 // Ignore the return value (declarations are statements).
1002 return;
1003 }
1004
1005 ASSERT(!var->is_global());
1006
1007 // If we have a function or a constant, we need to initialize the variable.
1008 Expression* val = NULL;
1009 if (node->mode() == Variable::CONST) {
1010 val = new Literal(Factory::the_hole_value());
1011 } else {
1012 val = node->fun(); // NULL if we don't have a function
1013 }
1014
1015 if (val != NULL) {
1016 {
1017 // Set the initial value.
1018 Reference target(this, node->proxy());
1019 Load(val);
1020 target.SetValue(NOT_CONST_INIT);
1021 // The reference is removed from the stack (preserving TOS) when
1022 // it goes out of scope.
1023 }
1024 // Get rid of the assigned value (declarations are statements).
1025 frame_->Drop();
1026 }
1027}
1028
1029
1030void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
1031 ASSERT(!in_spilled_code());
1032 Comment cmnt(masm_, "[ ExpressionStatement");
1033 CodeForStatementPosition(node);
1034 Expression* expression = node->expression();
1035 expression->MarkAsStatement();
1036 Load(expression);
1037 // Remove the lingering expression result from the top of stack.
1038 frame_->Drop();
1039}
1040
1041
1042void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
1043 ASSERT(!in_spilled_code());
1044 Comment cmnt(masm_, "// EmptyStatement");
1045 CodeForStatementPosition(node);
1046 // nothing to do
1047}
1048
1049
1050void CodeGenerator::VisitIfStatement(IfStatement* node) {
1051 ASSERT(!in_spilled_code());
1052 Comment cmnt(masm_, "[ IfStatement");
1053 // Generate different code depending on which parts of the if statement
1054 // are present or not.
1055 bool has_then_stm = node->HasThenStatement();
1056 bool has_else_stm = node->HasElseStatement();
1057
1058 CodeForStatementPosition(node);
1059 JumpTarget exit;
1060 if (has_then_stm && has_else_stm) {
1061 JumpTarget then;
1062 JumpTarget else_;
1063 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00001064 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001065
1066 if (dest.false_was_fall_through()) {
1067 // The else target was bound, so we compile the else part first.
1068 Visit(node->else_statement());
1069
1070 // We may have dangling jumps to the then part.
1071 if (then.is_linked()) {
1072 if (has_valid_frame()) exit.Jump();
1073 then.Bind();
1074 Visit(node->then_statement());
1075 }
1076 } else {
1077 // The then target was bound, so we compile the then part first.
1078 Visit(node->then_statement());
1079
1080 if (else_.is_linked()) {
1081 if (has_valid_frame()) exit.Jump();
1082 else_.Bind();
1083 Visit(node->else_statement());
1084 }
1085 }
1086
1087 } else if (has_then_stm) {
1088 ASSERT(!has_else_stm);
1089 JumpTarget then;
1090 ControlDestination dest(&then, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00001091 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001092
1093 if (dest.false_was_fall_through()) {
1094 // The exit label was bound. We may have dangling jumps to the
1095 // then part.
1096 if (then.is_linked()) {
1097 exit.Unuse();
1098 exit.Jump();
1099 then.Bind();
1100 Visit(node->then_statement());
1101 }
1102 } else {
1103 // The then label was bound.
1104 Visit(node->then_statement());
1105 }
1106
1107 } else if (has_else_stm) {
1108 ASSERT(!has_then_stm);
1109 JumpTarget else_;
1110 ControlDestination dest(&exit, &else_, false);
Steve Blockd0582a62009-12-15 09:54:21 +00001111 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001112
1113 if (dest.true_was_fall_through()) {
1114 // The exit label was bound. We may have dangling jumps to the
1115 // else part.
1116 if (else_.is_linked()) {
1117 exit.Unuse();
1118 exit.Jump();
1119 else_.Bind();
1120 Visit(node->else_statement());
1121 }
1122 } else {
1123 // The else label was bound.
1124 Visit(node->else_statement());
1125 }
1126
1127 } else {
1128 ASSERT(!has_then_stm && !has_else_stm);
1129 // We only care about the condition's side effects (not its value
1130 // or control flow effect). LoadCondition is called without
1131 // forcing control flow.
1132 ControlDestination dest(&exit, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00001133 LoadCondition(node->condition(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00001134 if (!dest.is_used()) {
1135 // We got a value on the frame rather than (or in addition to)
1136 // control flow.
1137 frame_->Drop();
1138 }
1139 }
1140
1141 if (exit.is_linked()) {
1142 exit.Bind();
1143 }
1144}
1145
1146
1147void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
1148 ASSERT(!in_spilled_code());
1149 Comment cmnt(masm_, "[ ContinueStatement");
1150 CodeForStatementPosition(node);
1151 node->target()->continue_target()->Jump();
1152}
1153
1154
1155void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
1156 ASSERT(!in_spilled_code());
1157 Comment cmnt(masm_, "[ BreakStatement");
1158 CodeForStatementPosition(node);
1159 node->target()->break_target()->Jump();
1160}
1161
1162
1163void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
1164 ASSERT(!in_spilled_code());
1165 Comment cmnt(masm_, "[ ReturnStatement");
1166
1167 CodeForStatementPosition(node);
1168 Load(node->expression());
1169 Result return_value = frame_->Pop();
1170 if (function_return_is_shadowed_) {
1171 function_return_.Jump(&return_value);
1172 } else {
1173 frame_->PrepareForReturn();
1174 if (function_return_.is_bound()) {
1175 // If the function return label is already bound we reuse the
1176 // code by jumping to the return site.
1177 function_return_.Jump(&return_value);
1178 } else {
1179 function_return_.Bind(&return_value);
1180 GenerateReturnSequence(&return_value);
1181 }
1182 }
1183}
1184
1185
1186void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
1187 ASSERT(!in_spilled_code());
1188 Comment cmnt(masm_, "[ WithEnterStatement");
1189 CodeForStatementPosition(node);
1190 Load(node->expression());
1191 Result context;
1192 if (node->is_catch_block()) {
1193 context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
1194 } else {
1195 context = frame_->CallRuntime(Runtime::kPushContext, 1);
1196 }
1197
1198 // Update context local.
1199 frame_->SaveContextRegister();
1200
1201 // Verify that the runtime call result and rsi agree.
1202 if (FLAG_debug_code) {
1203 __ cmpq(context.reg(), rsi);
1204 __ Assert(equal, "Runtime::NewContext should end up in rsi");
1205 }
1206}
1207
1208
1209void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
1210 ASSERT(!in_spilled_code());
1211 Comment cmnt(masm_, "[ WithExitStatement");
1212 CodeForStatementPosition(node);
1213 // Pop context.
1214 __ movq(rsi, ContextOperand(rsi, Context::PREVIOUS_INDEX));
1215 // Update context local.
1216 frame_->SaveContextRegister();
1217}
1218
1219
1220void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
1221 // TODO(X64): This code is completely generic and should be moved somewhere
1222 // where it can be shared between architectures.
1223 ASSERT(!in_spilled_code());
1224 Comment cmnt(masm_, "[ SwitchStatement");
1225 CodeForStatementPosition(node);
1226 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1227
1228 // Compile the switch value.
1229 Load(node->tag());
1230
1231 ZoneList<CaseClause*>* cases = node->cases();
1232 int length = cases->length();
1233 CaseClause* default_clause = NULL;
1234
1235 JumpTarget next_test;
1236 // Compile the case label expressions and comparisons. Exit early
1237 // if a comparison is unconditionally true. The target next_test is
1238 // bound before the loop in order to indicate control flow to the
1239 // first comparison.
1240 next_test.Bind();
1241 for (int i = 0; i < length && !next_test.is_unused(); i++) {
1242 CaseClause* clause = cases->at(i);
1243 // The default is not a test, but remember it for later.
1244 if (clause->is_default()) {
1245 default_clause = clause;
1246 continue;
1247 }
1248
1249 Comment cmnt(masm_, "[ Case comparison");
1250 // We recycle the same target next_test for each test. Bind it if
1251 // the previous test has not done so and then unuse it for the
1252 // loop.
1253 if (next_test.is_linked()) {
1254 next_test.Bind();
1255 }
1256 next_test.Unuse();
1257
1258 // Duplicate the switch value.
1259 frame_->Dup();
1260
1261 // Compile the label expression.
1262 Load(clause->label());
1263
1264 // Compare and branch to the body if true or the next test if
1265 // false. Prefer the next test as a fall through.
1266 ControlDestination dest(clause->body_target(), &next_test, false);
Andrei Popescu402d9372010-02-26 13:31:12 +00001267 Comparison(node, equal, true, &dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00001268
1269 // If the comparison fell through to the true target, jump to the
1270 // actual body.
1271 if (dest.true_was_fall_through()) {
1272 clause->body_target()->Unuse();
1273 clause->body_target()->Jump();
1274 }
1275 }
1276
1277 // If there was control flow to a next test from the last one
1278 // compiled, compile a jump to the default or break target.
1279 if (!next_test.is_unused()) {
1280 if (next_test.is_linked()) {
1281 next_test.Bind();
1282 }
1283 // Drop the switch value.
1284 frame_->Drop();
1285 if (default_clause != NULL) {
1286 default_clause->body_target()->Jump();
1287 } else {
1288 node->break_target()->Jump();
1289 }
1290 }
1291
1292 // The last instruction emitted was a jump, either to the default
1293 // clause or the break target, or else to a case body from the loop
1294 // that compiles the tests.
1295 ASSERT(!has_valid_frame());
1296 // Compile case bodies as needed.
1297 for (int i = 0; i < length; i++) {
1298 CaseClause* clause = cases->at(i);
1299
1300 // There are two ways to reach the body: from the corresponding
1301 // test or as the fall through of the previous body.
1302 if (clause->body_target()->is_linked() || has_valid_frame()) {
1303 if (clause->body_target()->is_linked()) {
1304 if (has_valid_frame()) {
1305 // If we have both a jump to the test and a fall through, put
1306 // a jump on the fall through path to avoid the dropping of
1307 // the switch value on the test path. The exception is the
1308 // default which has already had the switch value dropped.
1309 if (clause->is_default()) {
1310 clause->body_target()->Bind();
1311 } else {
1312 JumpTarget body;
1313 body.Jump();
1314 clause->body_target()->Bind();
1315 frame_->Drop();
1316 body.Bind();
1317 }
1318 } else {
1319 // No fall through to worry about.
1320 clause->body_target()->Bind();
1321 if (!clause->is_default()) {
1322 frame_->Drop();
1323 }
1324 }
1325 } else {
1326 // Otherwise, we have only fall through.
1327 ASSERT(has_valid_frame());
1328 }
1329
1330 // We are now prepared to compile the body.
1331 Comment cmnt(masm_, "[ Case body");
1332 VisitStatements(clause->statements());
1333 }
1334 clause->body_target()->Unuse();
1335 }
1336
1337 // We may not have a valid frame here so bind the break target only
1338 // if needed.
1339 if (node->break_target()->is_linked()) {
1340 node->break_target()->Bind();
1341 }
1342 node->break_target()->Unuse();
1343}
1344
1345
Steve Block3ce2e202009-11-05 08:53:23 +00001346void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001347 ASSERT(!in_spilled_code());
Steve Block3ce2e202009-11-05 08:53:23 +00001348 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00001349 CodeForStatementPosition(node);
1350 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
Steve Block3ce2e202009-11-05 08:53:23 +00001351 JumpTarget body(JumpTarget::BIDIRECTIONAL);
1352 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00001353
Steve Block3ce2e202009-11-05 08:53:23 +00001354 ConditionAnalysis info = AnalyzeCondition(node->cond());
1355 // Label the top of the loop for the backward jump if necessary.
1356 switch (info) {
1357 case ALWAYS_TRUE:
1358 // Use the continue target.
1359 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1360 node->continue_target()->Bind();
1361 break;
1362 case ALWAYS_FALSE:
1363 // No need to label it.
1364 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1365 break;
1366 case DONT_KNOW:
1367 // Continue is the test, so use the backward body target.
1368 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1369 body.Bind();
1370 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00001371 }
1372
Steve Block3ce2e202009-11-05 08:53:23 +00001373 CheckStack(); // TODO(1222600): ignore if body contains calls.
1374 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00001375
Steve Block3ce2e202009-11-05 08:53:23 +00001376 // Compile the test.
1377 switch (info) {
1378 case ALWAYS_TRUE:
1379 // If control flow can fall off the end of the body, jump back
1380 // to the top and bind the break target at the exit.
1381 if (has_valid_frame()) {
1382 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00001383 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001384 if (node->break_target()->is_linked()) {
1385 node->break_target()->Bind();
1386 }
1387 break;
Steve Block3ce2e202009-11-05 08:53:23 +00001388 case ALWAYS_FALSE:
1389 // We may have had continues or breaks in the body.
1390 if (node->continue_target()->is_linked()) {
1391 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00001392 }
Steve Block3ce2e202009-11-05 08:53:23 +00001393 if (node->break_target()->is_linked()) {
1394 node->break_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00001395 }
Steve Block3ce2e202009-11-05 08:53:23 +00001396 break;
1397 case DONT_KNOW:
1398 // We have to compile the test expression if it can be reached by
1399 // control flow falling out of the body or via continue.
1400 if (node->continue_target()->is_linked()) {
1401 node->continue_target()->Bind();
1402 }
1403 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00001404 Comment cmnt(masm_, "[ DoWhileCondition");
1405 CodeForDoWhileConditionPosition(node);
Steve Block3ce2e202009-11-05 08:53:23 +00001406 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00001407 LoadCondition(node->cond(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001408 }
Steve Block3ce2e202009-11-05 08:53:23 +00001409 if (node->break_target()->is_linked()) {
1410 node->break_target()->Bind();
1411 }
1412 break;
1413 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001414
Steve Block3ce2e202009-11-05 08:53:23 +00001415 DecrementLoopNesting();
1416 node->continue_target()->Unuse();
1417 node->break_target()->Unuse();
1418}
Steve Blocka7e24c12009-10-30 11:49:00 +00001419
Steve Block3ce2e202009-11-05 08:53:23 +00001420
1421void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
1422 ASSERT(!in_spilled_code());
1423 Comment cmnt(masm_, "[ WhileStatement");
1424 CodeForStatementPosition(node);
1425
1426 // If the condition is always false and has no side effects, we do not
1427 // need to compile anything.
1428 ConditionAnalysis info = AnalyzeCondition(node->cond());
1429 if (info == ALWAYS_FALSE) return;
1430
1431 // Do not duplicate conditions that may have function literal
1432 // subexpressions. This can cause us to compile the function literal
1433 // twice.
1434 bool test_at_bottom = !node->may_have_function_literal();
1435 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1436 IncrementLoopNesting();
1437 JumpTarget body;
1438 if (test_at_bottom) {
1439 body.set_direction(JumpTarget::BIDIRECTIONAL);
1440 }
1441
1442 // Based on the condition analysis, compile the test as necessary.
1443 switch (info) {
1444 case ALWAYS_TRUE:
1445 // We will not compile the test expression. Label the top of the
1446 // loop with the continue target.
1447 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1448 node->continue_target()->Bind();
1449 break;
1450 case DONT_KNOW: {
1451 if (test_at_bottom) {
1452 // Continue is the test at the bottom, no need to label the test
1453 // at the top. The body is a backward target.
1454 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1455 } else {
1456 // Label the test at the top as the continue target. The body
1457 // is a forward-only target.
1458 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1459 node->continue_target()->Bind();
1460 }
1461 // Compile the test with the body as the true target and preferred
1462 // fall-through and with the break target as the false target.
1463 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00001464 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001465
1466 if (dest.false_was_fall_through()) {
1467 // If we got the break target as fall-through, the test may have
1468 // been unconditionally false (if there are no jumps to the
1469 // body).
1470 if (!body.is_linked()) {
1471 DecrementLoopNesting();
1472 return;
1473 }
1474
1475 // Otherwise, jump around the body on the fall through and then
1476 // bind the body target.
1477 node->break_target()->Unuse();
1478 node->break_target()->Jump();
1479 body.Bind();
1480 }
1481 break;
1482 }
1483 case ALWAYS_FALSE:
1484 UNREACHABLE();
1485 break;
1486 }
1487
1488 CheckStack(); // TODO(1222600): ignore if body contains calls.
1489 Visit(node->body());
1490
1491 // Based on the condition analysis, compile the backward jump as
1492 // necessary.
1493 switch (info) {
1494 case ALWAYS_TRUE:
1495 // The loop body has been labeled with the continue target.
1496 if (has_valid_frame()) {
1497 node->continue_target()->Jump();
1498 }
1499 break;
1500 case DONT_KNOW:
1501 if (test_at_bottom) {
1502 // If we have chosen to recompile the test at the bottom,
1503 // then it is the continue target.
Steve Blocka7e24c12009-10-30 11:49:00 +00001504 if (node->continue_target()->is_linked()) {
1505 node->continue_target()->Bind();
1506 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001507 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001508 // The break target is the fall-through (body is a backward
1509 // jump from here and thus an invalid fall-through).
1510 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00001511 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001512 }
1513 } else {
1514 // If we have chosen not to recompile the test at the
1515 // bottom, jump back to the one at the top.
1516 if (has_valid_frame()) {
1517 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00001518 }
1519 }
Steve Block3ce2e202009-11-05 08:53:23 +00001520 break;
1521 case ALWAYS_FALSE:
1522 UNREACHABLE();
1523 break;
1524 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001525
Steve Block3ce2e202009-11-05 08:53:23 +00001526 // The break target may be already bound (by the condition), or there
1527 // may not be a valid frame. Bind it only if needed.
1528 if (node->break_target()->is_linked()) {
1529 node->break_target()->Bind();
1530 }
1531 DecrementLoopNesting();
1532}
1533
1534
Steve Block6ded16b2010-05-10 14:33:55 +01001535void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
1536 ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
1537 if (slot->type() == Slot::LOCAL) {
1538 frame_->SetTypeForLocalAt(slot->index(), info);
1539 } else {
1540 frame_->SetTypeForParamAt(slot->index(), info);
1541 }
1542 if (FLAG_debug_code && info.IsSmi()) {
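  // In debug builds, verify at run time that the slot really holds a smi
  // before trusting the recorded type information.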
1543 if (slot->type() == Slot::LOCAL) {
1544 frame_->PushLocalAt(slot->index());
1545 } else {
1546 frame_->PushParameterAt(slot->index());
1547 }
1548 Result var = frame_->Pop();
1549 var.ToRegister();
1550 __ AbortIfNotSmi(var.reg(), "Non-smi value in smi-typed stack slot.");
1551 }
1552}
1553
1554
Steve Block3ce2e202009-11-05 08:53:23 +00001555void CodeGenerator::VisitForStatement(ForStatement* node) {
1556 ASSERT(!in_spilled_code());
1557 Comment cmnt(masm_, "[ ForStatement");
1558 CodeForStatementPosition(node);
1559
1560 // Compile the init expression if present.
1561 if (node->init() != NULL) {
1562 Visit(node->init());
1563 }
1564
1565 // If the condition is always false and has no side effects, we do not
1566 // need to compile anything else.
1567 ConditionAnalysis info = AnalyzeCondition(node->cond());
1568 if (info == ALWAYS_FALSE) return;
1569
1570 // Do not duplicate conditions that may have function literal
1571 // subexpressions. This can cause us to compile the function literal
1572 // twice.
1573 bool test_at_bottom = !node->may_have_function_literal();
1574 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1575 IncrementLoopNesting();
1576
1577 // Target for backward edge if no test at the bottom, otherwise
1578 // unused.
1579 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
1580
1581 // Target for backward edge if there is a test at the bottom,
1582 // otherwise used as target for test at the top.
1583 JumpTarget body;
1584 if (test_at_bottom) {
1585 body.set_direction(JumpTarget::BIDIRECTIONAL);
1586 }
1587
1588 // Based on the condition analysis, compile the test as necessary.
1589 switch (info) {
1590 case ALWAYS_TRUE:
1591 // We will not compile the test expression. Label the top of the
1592 // loop.
1593 if (node->next() == NULL) {
1594 // Use the continue target if there is no update expression.
1595 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1596 node->continue_target()->Bind();
1597 } else {
1598 // Otherwise use the backward loop target.
1599 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1600 loop.Bind();
1601 }
1602 break;
1603 case DONT_KNOW: {
1604 if (test_at_bottom) {
1605 // Continue is either the update expression or the test at the
1606 // bottom, no need to label the test at the top.
1607 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1608 } else if (node->next() == NULL) {
1609 // We are not recompiling the test at the bottom and there is no
1610 // update expression.
1611 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1612 node->continue_target()->Bind();
1613 } else {
1614 // We are not recompiling the test at the bottom and there is an
1615 // update expression.
1616 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1617 loop.Bind();
1618 }
1619
1620 // Compile the test with the body as the true target and preferred
1621 // fall-through and with the break target as the false target.
1622 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00001623 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001624
1625 if (dest.false_was_fall_through()) {
1626 // If we got the break target as fall-through, the test may have
1627 // been unconditionally false (if there are no jumps to the
1628 // body).
1629 if (!body.is_linked()) {
1630 DecrementLoopNesting();
1631 return;
1632 }
1633
1634 // Otherwise, jump around the body on the fall through and then
1635 // bind the body target.
1636 node->break_target()->Unuse();
1637 node->break_target()->Jump();
1638 body.Bind();
1639 }
1640 break;
1641 }
1642 case ALWAYS_FALSE:
1643 UNREACHABLE();
1644 break;
1645 }
1646
1647 CheckStack(); // TODO(1222600): ignore if body contains calls.
Steve Block6ded16b2010-05-10 14:33:55 +01001648
1649 // We know that the loop index is a smi if it is not modified in the
1650 // loop body and it is checked against a constant limit in the loop
1651 // condition. In this case, we reset the static type information of the
1652 // loop index to smi before compiling the body, the update expression, and
1653 // the bottom check of the loop condition.
1654 if (node->is_fast_smi_loop()) {
1655 // Set number type of the loop variable to smi.
1656 SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
1657 }
1658
Steve Block3ce2e202009-11-05 08:53:23 +00001659 Visit(node->body());
1660
1661 // If there is an update expression, compile it if necessary.
1662 if (node->next() != NULL) {
1663 if (node->continue_target()->is_linked()) {
1664 node->continue_target()->Bind();
1665 }
1666
1667 // Control can reach the update by falling out of the body or by a
1668 // continue.
1669 if (has_valid_frame()) {
1670 // Record the source position of the statement, as this code (which
1671 // comes after the code for the body) actually belongs to the loop
1672 // statement and not the body.
1673 CodeForStatementPosition(node);
1674 Visit(node->next());
1675 }
1676 }
1677
Steve Block6ded16b2010-05-10 14:33:55 +01001678 // Set the type of the loop variable to smi before compiling the test
1679 // expression if we are in a fast smi loop condition.
1680 if (node->is_fast_smi_loop() && has_valid_frame()) {
1681 // Set number type of the loop variable to smi.
1682 SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
1683 }
1684
Steve Block3ce2e202009-11-05 08:53:23 +00001685 // Based on the condition analysis, compile the backward jump as
1686 // necessary.
1687 switch (info) {
1688 case ALWAYS_TRUE:
1689 if (has_valid_frame()) {
1690 if (node->next() == NULL) {
1691 node->continue_target()->Jump();
1692 } else {
1693 loop.Jump();
1694 }
1695 }
1696 break;
1697 case DONT_KNOW:
1698 if (test_at_bottom) {
1699 if (node->continue_target()->is_linked()) {
1700 // We can have dangling jumps to the continue target if there
1701 // was no update expression.
1702 node->continue_target()->Bind();
1703 }
1704 // Control can reach the test at the bottom by falling out of
1705 // the body, by a continue in the body, or from the update
1706 // expression.
1707 if (has_valid_frame()) {
1708 // The break target is the fall-through (body is a backward
1709 // jump from here).
1710 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00001711 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001712 }
1713 } else {
1714 // Otherwise, jump back to the test at the top.
Steve Blocka7e24c12009-10-30 11:49:00 +00001715 if (has_valid_frame()) {
1716 if (node->next() == NULL) {
1717 node->continue_target()->Jump();
1718 } else {
1719 loop.Jump();
1720 }
1721 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001722 }
1723 break;
Steve Block3ce2e202009-11-05 08:53:23 +00001724 case ALWAYS_FALSE:
1725 UNREACHABLE();
1726 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00001727 }
1728
Steve Block3ce2e202009-11-05 08:53:23 +00001729 // The break target may be already bound (by the condition), or there
1730 // may not be a valid frame. Bind it only if needed.
1731 if (node->break_target()->is_linked()) {
1732 node->break_target()->Bind();
1733 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001734 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00001735}
1736
1737
1738void CodeGenerator::VisitForInStatement(ForInStatement* node) {
1739 ASSERT(!in_spilled_code());
1740 VirtualFrame::SpilledScope spilled_scope;
1741 Comment cmnt(masm_, "[ ForInStatement");
1742 CodeForStatementPosition(node);
1743
1744 JumpTarget primitive;
1745 JumpTarget jsobject;
1746 JumpTarget fixed_array;
1747 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
1748 JumpTarget end_del_check;
1749 JumpTarget exit;
1750
1751 // Get the object to enumerate over (converted to JSObject).
1752 LoadAndSpill(node->enumerable());
1753
1754 // Both SpiderMonkey and kjs ignore null and undefined in contrast
1755 // to the specification. 12.6.4 mandates a call to ToObject.
1756 frame_->EmitPop(rax);
1757
1758 // rax: value to be iterated over
1759 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1760 exit.Branch(equal);
1761 __ CompareRoot(rax, Heap::kNullValueRootIndex);
1762 exit.Branch(equal);
1763
1764 // Stack layout in body:
1765 // [iteration counter (smi)] <- slot 0
1766 // [length of array] <- slot 1
1767 // [FixedArray] <- slot 2
1768 // [Map or 0] <- slot 3
1769 // [Object] <- slot 4
1770
1771 // Check if enumerable is already a JSObject
1772 // rax: value to be iterated over
1773 Condition is_smi = masm_->CheckSmi(rax);
1774 primitive.Branch(is_smi);
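  // Smis are not JS objects, so they take the primitive path and are
  // converted with ToObject below.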
1775 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
1776 jsobject.Branch(above_equal);
1777
1778 primitive.Bind();
1779 frame_->EmitPush(rax);
1780 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
1781 // The function call returns the value in rax, which is where we want it below.
1782
1783 jsobject.Bind();
1784 // Get the set of properties (as a FixedArray or Map).
1785 // rax: value to be iterated over
Steve Blockd0582a62009-12-15 09:54:21 +00001786 frame_->EmitPush(rax); // Push the object being iterated over.
Steve Blocka7e24c12009-10-30 11:49:00 +00001787
Steve Blockd0582a62009-12-15 09:54:21 +00001788
1789 // Check cache validity in generated code. This is a fast case for
1790 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1791 // guarantee cache validity, call the runtime system to check cache
1792 // validity or get the property names in a fixed array.
1793 JumpTarget call_runtime;
1794 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
1795 JumpTarget check_prototype;
1796 JumpTarget use_cache;
1797 __ movq(rcx, rax);
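  // rcx walks the prototype chain, starting with the object being iterated over.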
1798 loop.Bind();
1799 // Check that there are no elements.
1800 __ movq(rdx, FieldOperand(rcx, JSObject::kElementsOffset));
1801 __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
1802 call_runtime.Branch(not_equal);
1803 // Check that instance descriptors are not empty so that we can
1804 // check for an enum cache. Leave the map in rbx for the subsequent
1805 // prototype load.
1806 __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
1807 __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
1808 __ CompareRoot(rdx, Heap::kEmptyDescriptorArrayRootIndex);
1809 call_runtime.Branch(equal);
1810 // Check that there is an enum cache in the non-empty instance
1811 // descriptors. This is the case if the next enumeration index
1812 // field does not contain a smi.
1813 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
1814 is_smi = masm_->CheckSmi(rdx);
1815 call_runtime.Branch(is_smi);
1816 // For all objects but the receiver, check that the cache is empty.
1817 __ cmpq(rcx, rax);
1818 check_prototype.Branch(equal);
1819 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1820 __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
1821 call_runtime.Branch(not_equal);
1822 check_prototype.Bind();
1823 // Load the prototype from the map and loop if non-null.
1824 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
1825 __ CompareRoot(rcx, Heap::kNullValueRootIndex);
1826 loop.Branch(not_equal);
1827 // The enum cache is valid. Load the map of the object being
1828 // iterated over and use the cache for the iteration.
1829 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
1830 use_cache.Jump();
1831
1832 call_runtime.Bind();
1833 // Call the runtime to get the property names for the object.
Steve Blocka7e24c12009-10-30 11:49:00 +00001834 frame_->EmitPush(rax); // push the Object (slot 4) for the runtime call
1835 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1836
1837 // If we got a Map, we can do a fast modification check.
1838 // Otherwise, we got a FixedArray, and we have to do a slow check.
1839 // rax: map or fixed array (result from call to
1840 // Runtime::kGetPropertyNamesFast)
1841 __ movq(rdx, rax);
1842 __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1843 __ CompareRoot(rcx, Heap::kMetaMapRootIndex);
1844 fixed_array.Branch(not_equal);
1845
Steve Blockd0582a62009-12-15 09:54:21 +00001846 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00001847 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00001848 // rax: map (either the result from a call to
1849 // Runtime::kGetPropertyNamesFast or has been fetched directly from
1850 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00001851 __ movq(rcx, rax);
1852 __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset));
1853 // Get the bridge array held in the enumeration index field.
1854 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
1855 // Get the cache from the bridge array.
1856 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1857
1858 frame_->EmitPush(rax); // <- slot 3
1859 frame_->EmitPush(rdx); // <- slot 2
1860 __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
1861 __ Integer32ToSmi(rax, rax);
1862 frame_->EmitPush(rax); // <- slot 1
Steve Block3ce2e202009-11-05 08:53:23 +00001863 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
Steve Blocka7e24c12009-10-30 11:49:00 +00001864 entry.Jump();
1865
1866 fixed_array.Bind();
1867 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
Steve Block3ce2e202009-11-05 08:53:23 +00001868 frame_->EmitPush(Smi::FromInt(0)); // <- slot 3
Steve Blocka7e24c12009-10-30 11:49:00 +00001869 frame_->EmitPush(rax); // <- slot 2
1870
1871 // Push the length of the array and the initial index onto the stack.
1872 __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1873 __ Integer32ToSmi(rax, rax);
1874 frame_->EmitPush(rax); // <- slot 1
Steve Block3ce2e202009-11-05 08:53:23 +00001875 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
Steve Blocka7e24c12009-10-30 11:49:00 +00001876
1877 // Condition.
1878 entry.Bind();
1879 // Grab the current frame's height for the break and continue
1880 // targets only after all the state is pushed on the frame.
1881 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1882 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1883
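  // Stop iterating once the count in slot 0 reaches the length in slot 1.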
1884 __ movq(rax, frame_->ElementAt(0)); // load the current count
Steve Block3ce2e202009-11-05 08:53:23 +00001885 __ SmiCompare(frame_->ElementAt(1), rax); // compare to the array length
1886 node->break_target()->Branch(below_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00001887
1888 // Get the i'th entry of the array.
1889 __ movq(rdx, frame_->ElementAt(2));
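  // Slot 2 holds the FixedArray of keys; convert the smi count in rax into
  // a scaled index into it.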
1890 SmiIndex index = masm_->SmiToIndex(rbx, rax, kPointerSizeLog2);
1891 __ movq(rbx,
1892 FieldOperand(rdx, index.reg, index.scale, FixedArray::kHeaderSize));
1893
1894 // Get the expected map from the stack or a zero map in the
1895 // permanent slow case. rax: current iteration count. rbx: i'th
1896 // entry of the enum cache.
1897 __ movq(rdx, frame_->ElementAt(3));
1898 // Check if the expected map still matches that of the enumerable.
1899 // If not, we have to filter the key.
1900 // rax: current iteration count
1901 // rbx: i'th entry of the enum cache
1902 // rdx: expected map value
1903 __ movq(rcx, frame_->ElementAt(4));
1904 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
1905 __ cmpq(rcx, rdx);
1906 end_del_check.Branch(equal);
1907
1908 // Convert the entry to a string (or null if it isn't a property anymore).
1909 frame_->EmitPush(frame_->ElementAt(4)); // push enumerable
1910 frame_->EmitPush(rbx); // push entry
1911 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
1912 __ movq(rbx, rax);
1913
1914 // If the property has been removed while iterating, we just skip it.
1915 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1916 node->continue_target()->Branch(equal);
1917
1918 end_del_check.Bind();
1919 // Store the entry in the 'each' expression and take another spin in the
1920 // loop. rbx: i'th entry of the enum cache (or the string thereof).
1921 frame_->EmitPush(rbx);
1922 { Reference each(this, node->each());
1923 // Loading a reference may leave the frame in an unspilled state.
1924 frame_->SpillAll();
1925 if (!each.is_illegal()) {
1926 if (each.size() > 0) {
1927 frame_->EmitPush(frame_->ElementAt(each.size()));
Leon Clarked91b9f72010-01-27 17:25:45 +00001928 each.SetValue(NOT_CONST_INIT);
1929 frame_->Drop(2); // Drop the original and the copy of the element.
1930 } else {
1931 // If the reference has size zero then we can use the value below
1932 // the reference as if it were above the reference, instead of pushing
1933 // a new copy of it above the reference.
1934 each.SetValue(NOT_CONST_INIT);
1935 frame_->Drop(); // Drop the original of the element.
Steve Blocka7e24c12009-10-30 11:49:00 +00001936 }
1937 }
1938 }
1939 // Unloading a reference may leave the frame in an unspilled state.
1940 frame_->SpillAll();
1941
Steve Blocka7e24c12009-10-30 11:49:00 +00001942 // Body.
1943 CheckStack(); // TODO(1222600): ignore if body contains calls.
1944 VisitAndSpill(node->body());
1945
1946 // Next. Reestablish a spilled frame in case we are coming here via
1947 // a continue in the body.
1948 node->continue_target()->Bind();
1949 frame_->SpillAll();
1950 frame_->EmitPop(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00001951 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00001952 frame_->EmitPush(rax);
1953 entry.Jump();
1954
1955 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
1956 // any frame.
1957 node->break_target()->Bind();
1958 frame_->Drop(5);
1959
1960 // Exit.
1961 exit.Bind();
1962
1963 node->continue_target()->Unuse();
1964 node->break_target()->Unuse();
1965}
1966
Steve Block3ce2e202009-11-05 08:53:23 +00001967void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001968 ASSERT(!in_spilled_code());
1969 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00001970 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00001971 CodeForStatementPosition(node);
1972
1973 JumpTarget try_block;
1974 JumpTarget exit;
1975
1976 try_block.Call();
1977 // --- Catch block ---
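  // Control reaches this point only when an exception is thrown in the try
  // block; the exception value arrives in rax.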
1978 frame_->EmitPush(rax);
1979
1980 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00001981 Variable* catch_var = node->catch_var()->var();
1982 ASSERT(catch_var != NULL && catch_var->slot() != NULL);
1983 StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00001984
1985 // Remove the exception from the stack.
1986 frame_->Drop();
1987
1988 VisitStatementsAndSpill(node->catch_block()->statements());
1989 if (has_valid_frame()) {
1990 exit.Jump();
1991 }
1992
1993
1994 // --- Try block ---
1995 try_block.Bind();
1996
1997 frame_->PushTryHandler(TRY_CATCH_HANDLER);
1998 int handler_height = frame_->height();
1999
2000 // Shadow the jump targets for all escapes from the try block, including
2001 // returns. During shadowing, the original target is hidden as the
2002 // ShadowTarget and operations on the original actually affect the
2003 // shadowing target.
2004 //
2005 // We should probably try to unify the escaping targets and the return
2006 // target.
2007 int nof_escapes = node->escaping_targets()->length();
2008 List<ShadowTarget*> shadows(1 + nof_escapes);
2009
2010 // Add the shadow target for the function return.
2011 static const int kReturnShadowIndex = 0;
2012 shadows.Add(new ShadowTarget(&function_return_));
2013 bool function_return_was_shadowed = function_return_is_shadowed_;
2014 function_return_is_shadowed_ = true;
2015 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2016
2017 // Add the remaining shadow targets.
2018 for (int i = 0; i < nof_escapes; i++) {
2019 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2020 }
2021
2022 // Generate code for the statements in the try block.
2023 VisitStatementsAndSpill(node->try_block()->statements());
2024
2025 // Stop the introduced shadowing and count the number of required unlinks.
2026 // After shadowing stops, the original targets are unshadowed and the
2027 // ShadowTargets represent the formerly shadowing targets.
2028 bool has_unlinks = false;
2029 for (int i = 0; i < shadows.length(); i++) {
2030 shadows[i]->StopShadowing();
2031 has_unlinks = has_unlinks || shadows[i]->is_linked();
2032 }
2033 function_return_is_shadowed_ = function_return_was_shadowed;
2034
2035 // Get an external reference to the handler address.
2036 ExternalReference handler_address(Top::k_handler_address);
2037
2038 // Make sure that there's nothing left on the stack above the
2039 // handler structure.
2040 if (FLAG_debug_code) {
2041 __ movq(kScratchRegister, handler_address);
2042 __ cmpq(rsp, Operand(kScratchRegister, 0));
2043 __ Assert(equal, "stack pointer should point to top handler");
2044 }
2045
2046 // If we can fall off the end of the try block, unlink from try chain.
2047 if (has_valid_frame()) {
2048 // The next handler address is on top of the frame. Unlink from
2049 // the handler list and drop the rest of this handler from the
2050 // frame.
2051 ASSERT(StackHandlerConstants::kNextOffset == 0);
2052 __ movq(kScratchRegister, handler_address);
2053 frame_->EmitPop(Operand(kScratchRegister, 0));
2054 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2055 if (has_unlinks) {
2056 exit.Jump();
2057 }
2058 }
2059
2060 // Generate unlink code for the (formerly) shadowing targets that
2061 // have been jumped to. Deallocate each shadow target.
2062 Result return_value;
2063 for (int i = 0; i < shadows.length(); i++) {
2064 if (shadows[i]->is_linked()) {
2065 // Unlink from try chain; be careful not to destroy the TOS if
2066 // there is one.
2067 if (i == kReturnShadowIndex) {
2068 shadows[i]->Bind(&return_value);
2069 return_value.ToRegister(rax);
2070 } else {
2071 shadows[i]->Bind();
2072 }
2073 // Because we can be jumping here (to spilled code) from
2074 // unspilled code, we need to reestablish a spilled frame at
2075 // this block.
2076 frame_->SpillAll();
2077
2078 // Reload sp from the top handler, because some statements that we
2079 // break from (eg, for...in) may have left stuff on the stack.
2080 __ movq(kScratchRegister, handler_address);
2081 __ movq(rsp, Operand(kScratchRegister, 0));
2082 frame_->Forget(frame_->height() - handler_height);
2083
2084 ASSERT(StackHandlerConstants::kNextOffset == 0);
2085 __ movq(kScratchRegister, handler_address);
2086 frame_->EmitPop(Operand(kScratchRegister, 0));
2087 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2088
2089 if (i == kReturnShadowIndex) {
2090 if (!function_return_is_shadowed_) frame_->PrepareForReturn();
2091 shadows[i]->other_target()->Jump(&return_value);
2092 } else {
2093 shadows[i]->other_target()->Jump();
2094 }
2095 }
2096 }
2097
2098 exit.Bind();
2099}
2100
2101
Steve Block3ce2e202009-11-05 08:53:23 +00002102void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002103 ASSERT(!in_spilled_code());
2104 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00002105 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002106 CodeForStatementPosition(node);
2107
2108 // State: Used to keep track of reason for entering the finally
2109 // block. Should probably be extended to hold information for
2110 // break/continue from within the try block.
2111 enum { FALLING, THROWING, JUMPING };
2112
2113 JumpTarget try_block;
2114 JumpTarget finally_block;
2115
2116 try_block.Call();
2117
2118 frame_->EmitPush(rax);
2119 // In case of thrown exceptions, this is where we continue.
Steve Block3ce2e202009-11-05 08:53:23 +00002120 __ Move(rcx, Smi::FromInt(THROWING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002121 finally_block.Jump();
2122
2123 // --- Try block ---
2124 try_block.Bind();
2125
2126 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
2127 int handler_height = frame_->height();
2128
2129 // Shadow the jump targets for all escapes from the try block, including
2130 // returns. During shadowing, the original target is hidden as the
2131 // ShadowTarget and operations on the original actually affect the
2132 // shadowing target.
2133 //
2134 // We should probably try to unify the escaping targets and the return
2135 // target.
2136 int nof_escapes = node->escaping_targets()->length();
2137 List<ShadowTarget*> shadows(1 + nof_escapes);
2138
2139 // Add the shadow target for the function return.
2140 static const int kReturnShadowIndex = 0;
2141 shadows.Add(new ShadowTarget(&function_return_));
2142 bool function_return_was_shadowed = function_return_is_shadowed_;
2143 function_return_is_shadowed_ = true;
2144 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2145
2146 // Add the remaining shadow targets.
2147 for (int i = 0; i < nof_escapes; i++) {
2148 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2149 }
2150
2151 // Generate code for the statements in the try block.
2152 VisitStatementsAndSpill(node->try_block()->statements());
2153
2154 // Stop the introduced shadowing and count the number of required unlinks.
2155 // After shadowing stops, the original targets are unshadowed and the
2156 // ShadowTargets represent the formerly shadowing targets.
2157 int nof_unlinks = 0;
2158 for (int i = 0; i < shadows.length(); i++) {
2159 shadows[i]->StopShadowing();
2160 if (shadows[i]->is_linked()) nof_unlinks++;
2161 }
2162 function_return_is_shadowed_ = function_return_was_shadowed;
2163
2164 // Get an external reference to the handler address.
2165 ExternalReference handler_address(Top::k_handler_address);
2166
2167 // If we can fall off the end of the try block, unlink from the try
2168 // chain and set the state on the frame to FALLING.
2169 if (has_valid_frame()) {
2170 // The next handler address is on top of the frame.
2171 ASSERT(StackHandlerConstants::kNextOffset == 0);
2172 __ movq(kScratchRegister, handler_address);
2173 frame_->EmitPop(Operand(kScratchRegister, 0));
2174 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2175
2176 // Fake a top of stack value (unneeded when FALLING) and set the
2177 // state in rcx, then jump around the unlink blocks if any.
2178 frame_->EmitPush(Heap::kUndefinedValueRootIndex);
Steve Block3ce2e202009-11-05 08:53:23 +00002179 __ Move(rcx, Smi::FromInt(FALLING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002180 if (nof_unlinks > 0) {
2181 finally_block.Jump();
2182 }
2183 }
2184
2185 // Generate code to unlink and set the state for the (formerly)
2186 // shadowing targets that have been jumped to.
2187 for (int i = 0; i < shadows.length(); i++) {
2188 if (shadows[i]->is_linked()) {
2189 // If we have come from the shadowed return, the return value is
2190 // on the virtual frame. We must preserve it until it is
2191 // pushed.
2192 if (i == kReturnShadowIndex) {
2193 Result return_value;
2194 shadows[i]->Bind(&return_value);
2195 return_value.ToRegister(rax);
2196 } else {
2197 shadows[i]->Bind();
2198 }
2199 // Because we can be jumping here (to spilled code) from
2200 // unspilled code, we need to reestablish a spilled frame at
2201 // this block.
2202 frame_->SpillAll();
2203
2204 // Reload sp from the top handler, because some statements that
2205 // we break from (e.g., for...in) may have left stuff on the
2206 // stack.
2207 __ movq(kScratchRegister, handler_address);
2208 __ movq(rsp, Operand(kScratchRegister, 0));
2209 frame_->Forget(frame_->height() - handler_height);
2210
2211 // Unlink this handler and drop it from the frame.
2212 ASSERT(StackHandlerConstants::kNextOffset == 0);
2213 __ movq(kScratchRegister, handler_address);
2214 frame_->EmitPop(Operand(kScratchRegister, 0));
2215 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2216
2217 if (i == kReturnShadowIndex) {
2218 // If this target shadowed the function return, materialize
2219 // the return value on the stack.
2220 frame_->EmitPush(rax);
2221 } else {
2222 // Fake TOS for targets that shadowed breaks and continues.
2223 frame_->EmitPush(Heap::kUndefinedValueRootIndex);
2224 }
Steve Block3ce2e202009-11-05 08:53:23 +00002225 __ Move(rcx, Smi::FromInt(JUMPING + i));
Steve Blocka7e24c12009-10-30 11:49:00 +00002226 if (--nof_unlinks > 0) {
2227 // If this is not the last unlink block, jump around the next.
2228 finally_block.Jump();
2229 }
2230 }
2231 }
2232
2233 // --- Finally block ---
2234 finally_block.Bind();
2235
2236 // Push the state on the stack.
2237 frame_->EmitPush(rcx);
2238
2239 // We keep two elements on the stack - the (possibly faked) result
2240 // and the state - while evaluating the finally block.
2241 //
2242 // Generate code for the statements in the finally block.
2243 VisitStatementsAndSpill(node->finally_block()->statements());
2244
2245 if (has_valid_frame()) {
2246 // Restore state and return value or faked TOS.
2247 frame_->EmitPop(rcx);
2248 frame_->EmitPop(rax);
2249 }
2250
2251 // Generate code to jump to the right destination for all used
2252 // formerly shadowing targets. Deallocate each shadow target.
2253 for (int i = 0; i < shadows.length(); i++) {
2254 if (has_valid_frame() && shadows[i]->is_bound()) {
2255 BreakTarget* original = shadows[i]->other_target();
Steve Block3ce2e202009-11-05 08:53:23 +00002256 __ SmiCompare(rcx, Smi::FromInt(JUMPING + i));
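  // State JUMPING + i means the finally block was entered from shadowed
  // target i; branch back to the original target on a match.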
Steve Blocka7e24c12009-10-30 11:49:00 +00002257 if (i == kReturnShadowIndex) {
2258 // The return value is (already) in rax.
2259 Result return_value = allocator_->Allocate(rax);
2260 ASSERT(return_value.is_valid());
2261 if (function_return_is_shadowed_) {
2262 original->Branch(equal, &return_value);
2263 } else {
2264 // Branch around the preparation for return which may emit
2265 // code.
2266 JumpTarget skip;
2267 skip.Branch(not_equal);
2268 frame_->PrepareForReturn();
2269 original->Jump(&return_value);
2270 skip.Bind();
2271 }
2272 } else {
2273 original->Branch(equal);
2274 }
2275 }
2276 }
2277
2278 if (has_valid_frame()) {
2279 // Check if we need to rethrow the exception.
2280 JumpTarget exit;
Steve Block3ce2e202009-11-05 08:53:23 +00002281 __ SmiCompare(rcx, Smi::FromInt(THROWING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002282 exit.Branch(not_equal);
2283
2284 // Rethrow exception.
2285 frame_->EmitPush(rax); // undo pop from above
2286 frame_->CallRuntime(Runtime::kReThrow, 1);
2287
2288 // Done.
2289 exit.Bind();
2290 }
2291}
2292
2293
2294void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
2295 ASSERT(!in_spilled_code());
2296 Comment cmnt(masm_, "[ DebuggerStatement");
2297 CodeForStatementPosition(node);
2298#ifdef ENABLE_DEBUGGER_SUPPORT
2299 // Spill everything, even constants, to the frame.
2300 frame_->SpillAll();
Leon Clarke4515c472010-02-03 11:58:03 +00002301
Andrei Popescu402d9372010-02-26 13:31:12 +00002302 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00002303 // Ignore the return value.
2304#endif
2305}
2306
2307
Steve Block6ded16b2010-05-10 14:33:55 +01002308void CodeGenerator::InstantiateFunction(
2309 Handle<SharedFunctionInfo> function_info) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002310 // The inevitable call will sync frame elements to memory anyway, so
2311 // we do it eagerly to allow us to push the arguments directly into
2312 // place.
Steve Blocka7e24c12009-10-30 11:49:00 +00002313 frame_->SyncRange(0, frame_->element_count() - 1);
2314
Leon Clarkee46be812010-01-19 14:06:41 +00002315 // Use the fast case closure allocation code that allocates in new
2316 // space for nested functions that don't need literals cloning.
Steve Block6ded16b2010-05-10 14:33:55 +01002317 if (scope()->is_function_scope() && function_info->num_literals() == 0) {
Leon Clarkee46be812010-01-19 14:06:41 +00002318 FastNewClosureStub stub;
Steve Block6ded16b2010-05-10 14:33:55 +01002319 frame_->Push(function_info);
Leon Clarkee46be812010-01-19 14:06:41 +00002320 Result answer = frame_->CallStub(&stub, 1);
2321 frame_->Push(&answer);
2322 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002323 // Call the runtime to instantiate the function based on the
2324 // shared function info.
Leon Clarkee46be812010-01-19 14:06:41 +00002325 frame_->EmitPush(rsi);
Steve Block6ded16b2010-05-10 14:33:55 +01002326 frame_->EmitPush(function_info);
Leon Clarkee46be812010-01-19 14:06:41 +00002327 Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
2328 frame_->Push(&result);
2329 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002330}
2331
2332
2333void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
2334 Comment cmnt(masm_, "[ FunctionLiteral");
2335
Steve Block6ded16b2010-05-10 14:33:55 +01002336 // Build the function info and instantiate it.
2337 Handle<SharedFunctionInfo> function_info =
2338 Compiler::BuildFunctionInfo(node, script(), this);
Steve Blocka7e24c12009-10-30 11:49:00 +00002339 // Check for stack-overflow exception.
2340 if (HasStackOverflow()) return;
Steve Block6ded16b2010-05-10 14:33:55 +01002341 InstantiateFunction(function_info);
Steve Blocka7e24c12009-10-30 11:49:00 +00002342}
2343
2344
Steve Block6ded16b2010-05-10 14:33:55 +01002345void CodeGenerator::VisitSharedFunctionInfoLiteral(
2346 SharedFunctionInfoLiteral* node) {
2347 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
2348 InstantiateFunction(node->shared_function_info());
Steve Blocka7e24c12009-10-30 11:49:00 +00002349}
2350
2351
2352void CodeGenerator::VisitConditional(Conditional* node) {
2353 Comment cmnt(masm_, "[ Conditional");
2354 JumpTarget then;
2355 JumpTarget else_;
2356 JumpTarget exit;
2357 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00002358 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002359
2360 if (dest.false_was_fall_through()) {
2361 // The else target was bound, so we compile the else part first.
Steve Blockd0582a62009-12-15 09:54:21 +00002362 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002363
2364 if (then.is_linked()) {
2365 exit.Jump();
2366 then.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00002367 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002368 }
2369 } else {
2370 // The then target was bound, so we compile the then part first.
Steve Blockd0582a62009-12-15 09:54:21 +00002371 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002372
2373 if (else_.is_linked()) {
2374 exit.Jump();
2375 else_.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00002376 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002377 }
2378 }
2379
2380 exit.Bind();
2381}
2382
2383
2384void CodeGenerator::VisitSlot(Slot* node) {
2385 Comment cmnt(masm_, "[ Slot");
Steve Blockd0582a62009-12-15 09:54:21 +00002386 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00002387}
2388
2389
2390void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
2391 Comment cmnt(masm_, "[ VariableProxy");
2392 Variable* var = node->var();
2393 Expression* expr = var->rewrite();
2394 if (expr != NULL) {
2395 Visit(expr);
2396 } else {
2397 ASSERT(var->is_global());
2398 Reference ref(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00002399 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002400 }
2401}
2402
2403
2404void CodeGenerator::VisitLiteral(Literal* node) {
2405 Comment cmnt(masm_, "[ Literal");
2406 frame_->Push(node->handle());
2407}
2408
2409
2410// Materialize the regexp literal 'node' in the literals array
2411// 'literals' of the function. Leave the regexp boilerplate in
2412// 'boilerplate'.
2413class DeferredRegExpLiteral: public DeferredCode {
2414 public:
2415 DeferredRegExpLiteral(Register boilerplate,
2416 Register literals,
2417 RegExpLiteral* node)
2418 : boilerplate_(boilerplate), literals_(literals), node_(node) {
2419 set_comment("[ DeferredRegExpLiteral");
2420 }
2421
2422 void Generate();
2423
2424 private:
2425 Register boilerplate_;
2426 Register literals_;
2427 RegExpLiteral* node_;
2428};
2429
2430
2431void DeferredRegExpLiteral::Generate() {
2432 // Since the entry is undefined we call the runtime system to
2433 // compute the literal.
2434 // Literal array (0).
2435 __ push(literals_);
2436 // Literal index (1).
Steve Block3ce2e202009-11-05 08:53:23 +00002437 __ Push(Smi::FromInt(node_->literal_index()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002438 // RegExp pattern (2).
2439 __ Push(node_->pattern());
2440 // RegExp flags (3).
2441 __ Push(node_->flags());
2442 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
2443 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
2444}
2445
2446
2447void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
2448 Comment cmnt(masm_, "[ RegExp Literal");
2449
2450 // Retrieve the literals array and check the allocated entry. Begin
2451 // with a writable copy of the function of this activation in a
2452 // register.
2453 frame_->PushFunction();
2454 Result literals = frame_->Pop();
2455 literals.ToRegister();
2456 frame_->Spill(literals.reg());
2457
2458 // Load the literals array of the function.
2459 __ movq(literals.reg(),
2460 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
2461
2462 // Load the literal at the ast saved index.
2463 Result boilerplate = allocator_->Allocate();
2464 ASSERT(boilerplate.is_valid());
2465 int literal_offset =
2466 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
2467 __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
2468
2469 // Check whether we need to materialize the RegExp object. If so,
2470 // jump to the deferred code passing the literals array.
2471 DeferredRegExpLiteral* deferred =
2472 new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
2473 __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
2474 deferred->Branch(equal);
2475 deferred->BindExit();
2476 literals.Unuse();
2477
2478 // Push the boilerplate object.
2479 frame_->Push(&boilerplate);
2480}
2481
2482
Steve Blocka7e24c12009-10-30 11:49:00 +00002483void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
2484 Comment cmnt(masm_, "[ ObjectLiteral");
2485
Leon Clarkee46be812010-01-19 14:06:41 +00002486 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00002487 // register.
2488 frame_->PushFunction();
2489 Result literals = frame_->Pop();
2490 literals.ToRegister();
2491 frame_->Spill(literals.reg());
2492
2493 // Load the literals array of the function.
2494 __ movq(literals.reg(),
2495 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00002496 // Literal array.
2497 frame_->Push(&literals);
2498 // Literal index.
2499 frame_->Push(Smi::FromInt(node->literal_index()));
2500 // Constant properties.
2501 frame_->Push(node->constant_properties());
Steve Block6ded16b2010-05-10 14:33:55 +01002502 // Should the object literal have fast elements?
2503 frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
Leon Clarkee46be812010-01-19 14:06:41 +00002504 Result clone;
2505 if (node->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01002506 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00002507 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002508 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00002509 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002510 frame_->Push(&clone);
2511
2512 for (int i = 0; i < node->properties()->length(); i++) {
2513 ObjectLiteral::Property* property = node->properties()->at(i);
2514 switch (property->kind()) {
2515 case ObjectLiteral::Property::CONSTANT:
2516 break;
2517 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2518 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
2519 // else fall through.
2520 case ObjectLiteral::Property::COMPUTED: {
2521 Handle<Object> key(property->key()->handle());
2522 if (key->IsSymbol()) {
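  // A symbol key allows a named store through the IC; other computed
  // keys fall through to the generic Runtime::kSetProperty call below.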
2523 // Duplicate the object as the IC receiver.
2524 frame_->Dup();
2525 Load(property->value());
2526 frame_->Push(key);
2527 Result ignored = frame_->CallStoreIC();
Steve Blocka7e24c12009-10-30 11:49:00 +00002528 break;
2529 }
2530 // Fall through
2531 }
2532 case ObjectLiteral::Property::PROTOTYPE: {
2533 // Duplicate the object as an argument to the runtime call.
2534 frame_->Dup();
2535 Load(property->key());
2536 Load(property->value());
2537 Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
2538 // Ignore the result.
2539 break;
2540 }
2541 case ObjectLiteral::Property::SETTER: {
2542 // Duplicate the object as an argument to the runtime call.
2543 frame_->Dup();
2544 Load(property->key());
2545 frame_->Push(Smi::FromInt(1));
2546 Load(property->value());
2547 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
2548 // Ignore the result.
2549 break;
2550 }
2551 case ObjectLiteral::Property::GETTER: {
2552 // Duplicate the object as an argument to the runtime call.
2553 frame_->Dup();
2554 Load(property->key());
2555 frame_->Push(Smi::FromInt(0));
2556 Load(property->value());
2557 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
2558 // Ignore the result.
2559 break;
2560 }
2561 default: UNREACHABLE();
2562 }
2563 }
2564}
2565
2566
Steve Blocka7e24c12009-10-30 11:49:00 +00002567void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
2568 Comment cmnt(masm_, "[ ArrayLiteral");
2569
Leon Clarkee46be812010-01-19 14:06:41 +00002570 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00002571 // register.
2572 frame_->PushFunction();
2573 Result literals = frame_->Pop();
2574 literals.ToRegister();
2575 frame_->Spill(literals.reg());
2576
2577 // Load the literals array of the function.
2578 __ movq(literals.reg(),
2579 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Andrei Popescu402d9372010-02-26 13:31:12 +00002580
Leon Clarkee46be812010-01-19 14:06:41 +00002581 frame_->Push(&literals);
Leon Clarkee46be812010-01-19 14:06:41 +00002582 frame_->Push(Smi::FromInt(node->literal_index()));
Leon Clarkee46be812010-01-19 14:06:41 +00002583 frame_->Push(node->constant_elements());
Andrei Popescu402d9372010-02-26 13:31:12 +00002584 int length = node->values()->length();
Leon Clarkee46be812010-01-19 14:06:41 +00002585 Result clone;
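  // Nested literals use the general runtime clone, over-long shallow
  // literals use the shallow runtime clone, and short shallow literals
  // use the fast stub.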
2586 if (node->depth() > 1) {
2587 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00002588 } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00002589 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00002590 } else {
2591 FastCloneShallowArrayStub stub(length);
2592 clone = frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00002593 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002594 frame_->Push(&clone);
2595
2596 // Generate code to set the elements in the array that are not
2597 // literals.
2598 for (int i = 0; i < node->values()->length(); i++) {
2599 Expression* value = node->values()->at(i);
2600
2601 // If value is a literal the property value is already set in the
2602 // boilerplate object.
2603 if (value->AsLiteral() != NULL) continue;
2604 // If value is a materialized literal the property value is already set
2605 // in the boilerplate object if it is simple.
2606 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
2607
2608 // The property must be set by generated code.
2609 Load(value);
2610
2611 // Get the property value off the stack.
2612 Result prop_value = frame_->Pop();
2613 prop_value.ToRegister();
2614
2615 // Fetch the array literal while leaving a copy on the stack and
2616 // use it to get the elements array.
2617 frame_->Dup();
2618 Result elements = frame_->Pop();
2619 elements.ToRegister();
2620 frame_->Spill(elements.reg());
2621 // Get the elements FixedArray.
2622 __ movq(elements.reg(),
2623 FieldOperand(elements.reg(), JSObject::kElementsOffset));
2624
2625 // Write to the indexed properties array.
2626 int offset = i * kPointerSize + FixedArray::kHeaderSize;
2627 __ movq(FieldOperand(elements.reg(), offset), prop_value.reg());
2628
2629 // Update the write barrier for the array address.
2630 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier.
2631 Result scratch = allocator_->Allocate();
2632 ASSERT(scratch.is_valid());
2633 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
2634 }
2635}
2636
2637
2638void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
2639 ASSERT(!in_spilled_code());
2640 // Call runtime routine to allocate the catch extension object and
2641 // assign the exception value to the catch variable.
2642 Comment cmnt(masm_, "[ CatchExtensionObject");
2643 Load(node->key());
2644 Load(node->value());
2645 Result result =
2646 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
2647 frame_->Push(&result);
2648}
2649
2650
2651void CodeGenerator::VisitAssignment(Assignment* node) {
2652 Comment cmnt(masm_, "[ Assignment");
2653
Leon Clarked91b9f72010-01-27 17:25:45 +00002654 { Reference target(this, node->target(), node->is_compound());
Steve Blocka7e24c12009-10-30 11:49:00 +00002655 if (target.is_illegal()) {
2656 // Fool the virtual frame into thinking that we left the assignment's
2657 // value on the frame.
2658 frame_->Push(Smi::FromInt(0));
2659 return;
2660 }
2661 Variable* var = node->target()->AsVariableProxy()->AsVariable();
2662
2663 if (node->starts_initialization_block()) {
2664 ASSERT(target.type() == Reference::NAMED ||
2665 target.type() == Reference::KEYED);
2666 // Change to slow case in the beginning of an initialization
2667 // block to avoid the quadratic behavior of repeatedly adding
2668 // fast properties.
2669
2670 // The receiver is the argument to the runtime call. It is the
2671 // first value pushed when the reference was loaded to the
2672 // frame.
2673 frame_->PushElementAt(target.size() - 1);
2674 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
2675 }
Leon Clarked91b9f72010-01-27 17:25:45 +00002676 if (node->ends_initialization_block()) {
2677 // Add an extra copy of the receiver to the frame, so that it can be
2678 // converted back to fast case after the assignment.
2679 ASSERT(target.type() == Reference::NAMED ||
2680 target.type() == Reference::KEYED);
2681 if (target.type() == Reference::NAMED) {
2682 frame_->Dup();
2683 // Dup target receiver on stack.
2684 } else {
2685 ASSERT(target.type() == Reference::KEYED);
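  // For a keyed store the key is on top: pop it, duplicate the receiver
  // underneath, and push the key back.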
2686 Result temp = frame_->Pop();
2687 frame_->Dup();
2688 frame_->Push(&temp);
2689 }
2690 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002691 if (node->op() == Token::ASSIGN ||
2692 node->op() == Token::INIT_VAR ||
2693 node->op() == Token::INIT_CONST) {
2694 Load(node->value());
2695
Leon Clarked91b9f72010-01-27 17:25:45 +00002696 } else { // Assignment is a compound assignment.
Steve Blocka7e24c12009-10-30 11:49:00 +00002697 Literal* literal = node->value()->AsLiteral();
2698 bool overwrite_value =
2699 (node->value()->AsBinaryOperation() != NULL &&
2700 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
2701 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
2702 // There are two cases where the target is not read in the right hand
2703 // side that are easy to test for: the right hand side is a literal,
2704 // or the right hand side is a different variable. TakeValue invalidates
2705 // the target, with an implicit promise that it will be written to again
2706 // before it is read.
2707 if (literal != NULL || (right_var != NULL && right_var != var)) {
Steve Blockd0582a62009-12-15 09:54:21 +00002708 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002709 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00002710 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002711 }
2712 Load(node->value());
Steve Block6ded16b2010-05-10 14:33:55 +01002713 BinaryOperation expr(node, node->binary_op(), node->target(),
2714 node->value());
2715 GenericBinaryOperation(&expr,
Steve Blocka7e24c12009-10-30 11:49:00 +00002716 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
2717 }
2718
2719 if (var != NULL &&
2720 var->mode() == Variable::CONST &&
2721 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
2722 // Assignment ignored - leave the value on the stack.
Leon Clarked91b9f72010-01-27 17:25:45 +00002723 UnloadReference(&target);
Steve Blocka7e24c12009-10-30 11:49:00 +00002724 } else {
2725 CodeForSourcePosition(node->position());
2726 if (node->op() == Token::INIT_CONST) {
2727 // Dynamic constant initializations must use the function context
2728 // and initialize the actual constant declared. Dynamic variable
2729 // initializations are simply assignments and use SetValue.
2730 target.SetValue(CONST_INIT);
2731 } else {
2732 target.SetValue(NOT_CONST_INIT);
2733 }
2734 if (node->ends_initialization_block()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00002735 ASSERT(target.type() == Reference::UNLOADED);
Steve Blocka7e24c12009-10-30 11:49:00 +00002736 // End of initialization block. Revert to fast case. The
Leon Clarked91b9f72010-01-27 17:25:45 +00002737 // argument to the runtime call is the extra copy of the receiver,
2738 // which is below the value of the assignment.
2739 // Swap the receiver and the value of the assignment expression.
2740 Result lhs = frame_->Pop();
2741 Result receiver = frame_->Pop();
2742 frame_->Push(&lhs);
2743 frame_->Push(&receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00002744 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
2745 }
2746 }
2747 }
2748}
2749
2750
2751void CodeGenerator::VisitThrow(Throw* node) {
2752 Comment cmnt(masm_, "[ Throw");
2753 Load(node->exception());
2754 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
2755 frame_->Push(&result);
2756}
2757
2758
2759void CodeGenerator::VisitProperty(Property* node) {
2760 Comment cmnt(masm_, "[ Property");
2761 Reference property(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00002762 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002763}
2764
2765
2766void CodeGenerator::VisitCall(Call* node) {
2767 Comment cmnt(masm_, "[ Call");
2768
2769 ZoneList<Expression*>* args = node->arguments();
2770
2771 // Check if the function is a variable or a property.
2772 Expression* function = node->expression();
2773 Variable* var = function->AsVariableProxy()->AsVariable();
2774 Property* property = function->AsProperty();
2775
2776 // ------------------------------------------------------------------------
2777 // Fast-case: Use inline caching.
2778 // ---
2779 // According to ECMA-262, section 11.2.3, page 44, the function to call
2780 // must be resolved after the arguments have been evaluated. The IC code
2781 // automatically handles this by loading the arguments before the function
2782 // is resolved in cache misses (this also holds for megamorphic calls).
2783 // ------------------------------------------------------------------------
2784
2785 if (var != NULL && var->is_possibly_eval()) {
2786 // ----------------------------------
2787 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
2788 // ----------------------------------
2789
2790 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2791 // resolve the function we need to call and the receiver of the
2792 // call. Then we call the resolved function using the given
2793 // arguments.
2794
2795 // Prepare the stack for the call to the resolved function.
2796 Load(function);
2797
2798 // Allocate a frame slot for the receiver.
2799 frame_->Push(Factory::undefined_value());
2800 int arg_count = args->length();
2801 for (int i = 0; i < arg_count; i++) {
2802 Load(args->at(i));
2803 }
2804
2805 // Prepare the stack for the call to ResolvePossiblyDirectEval.
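  // Push a copy of the function and of the first argument (the eval
  // source), or undefined if there are no arguments.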
2806 frame_->PushElementAt(arg_count + 1);
2807 if (arg_count > 0) {
2808 frame_->PushElementAt(arg_count);
2809 } else {
2810 frame_->Push(Factory::undefined_value());
2811 }
2812
Leon Clarkee46be812010-01-19 14:06:41 +00002813 // Push the receiver.
2814 frame_->PushParameterAt(-1);
2815
Steve Blocka7e24c12009-10-30 11:49:00 +00002816 // Resolve the call.
2817 Result result =
Leon Clarkee46be812010-01-19 14:06:41 +00002818 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00002819
Leon Clarkee46be812010-01-19 14:06:41 +00002820 // The runtime call returns a pair of values in rax (function) and
2821 // rdx (receiver). Touch up the stack with the right values.
2822 Result receiver = allocator_->Allocate(rdx);
2823 frame_->SetElementAt(arg_count + 1, &result);
2824 frame_->SetElementAt(arg_count, &receiver);
2825 receiver.Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00002826
2827 // Call the function.
2828 CodeForSourcePosition(node->position());
2829 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00002830 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00002831 result = frame_->CallStub(&call_function, arg_count + 1);
2832
2833 // Restore the context and overwrite the function on the stack with
2834 // the result.
2835 frame_->RestoreContextRegister();
2836 frame_->SetElementAt(0, &result);
2837
2838 } else if (var != NULL && !var->is_this() && var->is_global()) {
2839 // ----------------------------------
2840 // JavaScript example: 'foo(1, 2, 3)' // foo is global
2841 // ----------------------------------
2842
Steve Blocka7e24c12009-10-30 11:49:00 +00002843 // Pass the global object as the receiver and let the IC stub
2844 // patch the stack to use the global proxy as 'this' in the
2845 // invoked function.
2846 LoadGlobal();
2847
2848 // Load the arguments.
2849 int arg_count = args->length();
2850 for (int i = 0; i < arg_count; i++) {
2851 Load(args->at(i));
2852 }
2853
Andrei Popescu402d9372010-02-26 13:31:12 +00002854 // Push the name of the function on the frame.
2855 frame_->Push(var->name());
2856
Steve Blocka7e24c12009-10-30 11:49:00 +00002857 // Call the IC initialization code.
2858 CodeForSourcePosition(node->position());
2859 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
2860 arg_count,
2861 loop_nesting());
2862 frame_->RestoreContextRegister();
2863 // Replace the function on the stack with the result.
Andrei Popescu402d9372010-02-26 13:31:12 +00002864 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00002865
2866 } else if (var != NULL && var->slot() != NULL &&
2867 var->slot()->type() == Slot::LOOKUP) {
2868 // ----------------------------------
2869 // JavaScript example: 'with (obj) foo(1, 2, 3)' // foo is in obj
2870 // ----------------------------------
2871
2872 // Load the function from the context. Sync the frame so we can
2873 // push the arguments directly into place.
2874 frame_->SyncRange(0, frame_->element_count() - 1);
2875 frame_->EmitPush(rsi);
2876 frame_->EmitPush(var->name());
2877 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
2878 // The runtime call returns a pair of values in rax and rdx. The
2879 // looked-up function is in rax and the receiver is in rdx. These
2880 // register references are not ref counted here. We spill them
2881 // eagerly since they are arguments to an inevitable call (and are
2882 // not sharable by the arguments).
2883 ASSERT(!allocator()->is_used(rax));
2884 frame_->EmitPush(rax);
2885
2886 // Load the receiver.
2887 ASSERT(!allocator()->is_used(rdx));
2888 frame_->EmitPush(rdx);
2889
2890 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002891 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002892
2893 } else if (property != NULL) {
2894 // Check if the key is a literal string.
2895 Literal* literal = property->key()->AsLiteral();
2896
2897 if (literal != NULL && literal->handle()->IsSymbol()) {
2898 // ------------------------------------------------------------------
2899 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
2900 // ------------------------------------------------------------------
2901
2902 Handle<String> name = Handle<String>::cast(literal->handle());
2903
2904 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
2905 name->IsEqualTo(CStrVector("apply")) &&
2906 args->length() == 2 &&
2907 args->at(1)->AsVariableProxy() != NULL &&
2908 args->at(1)->AsVariableProxy()->IsArguments()) {
2909 // Use the optimized Function.prototype.apply that avoids
2910 // allocating lazily allocated arguments objects.
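        // This covers calls of the form 'obj.foo.apply(receiver, arguments)'
        // in functions that allocate their arguments object lazily.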
Leon Clarked91b9f72010-01-27 17:25:45 +00002911 CallApplyLazy(property->obj(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002912 args->at(0),
2913 args->at(1)->AsVariableProxy(),
2914 node->position());
2915
2916 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00002917 // Push the receiver onto the frame.
Steve Blocka7e24c12009-10-30 11:49:00 +00002918 Load(property->obj());
2919
2920 // Load the arguments.
2921 int arg_count = args->length();
2922 for (int i = 0; i < arg_count; i++) {
2923 Load(args->at(i));
2924 }
2925
Andrei Popescu402d9372010-02-26 13:31:12 +00002926 // Push the name of the function onto the frame.
2927 frame_->Push(name);
2928
Steve Blocka7e24c12009-10-30 11:49:00 +00002929 // Call the IC initialization code.
2930 CodeForSourcePosition(node->position());
2931 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET,
2932 arg_count,
2933 loop_nesting());
2934 frame_->RestoreContextRegister();
Andrei Popescu402d9372010-02-26 13:31:12 +00002935 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00002936 }
2937
2938 } else {
2939 // -------------------------------------------
2940 // JavaScript example: 'array[index](1, 2, 3)'
2941 // -------------------------------------------
2942
2943 // Load the function to call from the property through a reference.
Steve Blocka7e24c12009-10-30 11:49:00 +00002944 if (property->is_synthetic()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00002945 Reference ref(this, property, false);
2946 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002947 // Use global object as receiver.
2948 LoadGlobalReceiver();
2949 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00002950 Reference ref(this, property, false);
2951 ASSERT(ref.size() == 2);
2952 Result key = frame_->Pop();
2953 frame_->Dup(); // Duplicate the receiver.
2954 frame_->Push(&key);
2955 ref.GetValue();
2956 // Top of frame contains function to call, with duplicate copy of
2957 // receiver below it. Swap them.
2958 Result function = frame_->Pop();
2959 Result receiver = frame_->Pop();
2960 frame_->Push(&function);
2961 frame_->Push(&receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00002962 }
2963
2964 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002965 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002966 }
2967
2968 } else {
2969 // ----------------------------------
2970 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
2971 // ----------------------------------
2972
2973 // Load the function.
2974 Load(function);
2975
2976 // Pass the global proxy as the receiver.
2977 LoadGlobalReceiver();
2978
2979 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002980 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002981 }
2982}
2983
2984
2985void CodeGenerator::VisitCallNew(CallNew* node) {
2986 Comment cmnt(masm_, "[ CallNew");
2987
2988 // According to ECMA-262, section 11.2.2, page 44, the function
2989 // expression in new calls must be evaluated before the
2990 // arguments. This is different from ordinary calls, where the
2991 // actual function to call is resolved after the arguments have been
2992 // evaluated.
2993
2994 // Compute function to call and use the global object as the
2995 // receiver. There is no need to use the global proxy here because
2996 // it will always be replaced with a newly allocated object.
2997 Load(node->expression());
2998 LoadGlobal();
2999
3000 // Push the arguments ("left-to-right") on the stack.
3001 ZoneList<Expression*>* args = node->arguments();
3002 int arg_count = args->length();
3003 for (int i = 0; i < arg_count; i++) {
3004 Load(args->at(i));
3005 }
3006
3007 // Call the construct call builtin that handles allocation and
3008 // constructor invocation.
3009 CodeForSourcePosition(node->position());
3010 Result result = frame_->CallConstructor(arg_count);
3011 // Replace the function on the stack with the result.
3012 frame_->SetElementAt(0, &result);
3013}
3014
3015
3016void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
3017 if (CheckForInlineRuntimeCall(node)) {
3018 return;
3019 }
3020
3021 ZoneList<Expression*>* args = node->arguments();
3022 Comment cmnt(masm_, "[ CallRuntime");
3023 Runtime::Function* function = node->function();
3024
3025 if (function == NULL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003026 // Push the builtins object found in the current global object.
3027 Result temp = allocator()->Allocate();
3028 ASSERT(temp.is_valid());
3029 __ movq(temp.reg(), GlobalObject());
3030 __ movq(temp.reg(),
3031 FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
3032 frame_->Push(&temp);
3033 }
3034
3035 // Push the arguments ("left-to-right").
3036 int arg_count = args->length();
3037 for (int i = 0; i < arg_count; i++) {
3038 Load(args->at(i));
3039 }
3040
3041 if (function == NULL) {
3042 // Call the JS runtime function.
Andrei Popescu402d9372010-02-26 13:31:12 +00003043 frame_->Push(node->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00003044 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
3045 arg_count,
3046 loop_nesting_);
3047 frame_->RestoreContextRegister();
Andrei Popescu402d9372010-02-26 13:31:12 +00003048 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00003049 } else {
3050 // Call the C runtime function.
3051 Result answer = frame_->CallRuntime(function, arg_count);
3052 frame_->Push(&answer);
3053 }
3054}
3055
3056
3057void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003058 Comment cmnt(masm_, "[ UnaryOperation");
3059
3060 Token::Value op = node->op();
3061
3062 if (op == Token::NOT) {
3063 // Swap the true and false targets but keep the same actual label
3064 // as the fall through.
3065 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00003066 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003067 // Swap the labels back.
3068 destination()->Invert();
3069
3070 } else if (op == Token::DELETE) {
3071 Property* property = node->expression()->AsProperty();
3072 if (property != NULL) {
3073 Load(property->obj());
3074 Load(property->key());
3075 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
3076 frame_->Push(&answer);
3077 return;
3078 }
3079
3080 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
3081 if (variable != NULL) {
3082 Slot* slot = variable->slot();
3083 if (variable->is_global()) {
3084 LoadGlobal();
3085 frame_->Push(variable->name());
3086 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
3087 CALL_FUNCTION, 2);
3088 frame_->Push(&answer);
3089 return;
3090
3091 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
3092 // Call the runtime to look up the context holding the named
3093 // variable. Sync the virtual frame eagerly so we can push the
3094 // arguments directly into place.
3095 frame_->SyncRange(0, frame_->element_count() - 1);
3096 frame_->EmitPush(rsi);
3097 frame_->EmitPush(variable->name());
3098 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
3099 ASSERT(context.is_register());
3100 frame_->EmitPush(context.reg());
3101 context.Unuse();
3102 frame_->EmitPush(variable->name());
3103 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
3104 CALL_FUNCTION, 2);
3105 frame_->Push(&answer);
3106 return;
3107 }
3108
3109 // Default: Result of deleting non-global, not dynamically
3110 // introduced variables is false.
3111 frame_->Push(Factory::false_value());
3112
3113 } else {
3114 // Default: Result of deleting expressions is true.
3115 Load(node->expression()); // may have side-effects
3116 frame_->SetElementAt(0, Factory::true_value());
3117 }
3118
3119 } else if (op == Token::TYPEOF) {
3120 // Special case for loading the typeof expression; see comment on
3121 // LoadTypeofExpression().
3122 LoadTypeofExpression(node->expression());
3123 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
3124 frame_->Push(&answer);
3125
3126 } else if (op == Token::VOID) {
3127 Expression* expression = node->expression();
3128 if (expression && expression->AsLiteral() && (
3129 expression->AsLiteral()->IsTrue() ||
3130 expression->AsLiteral()->IsFalse() ||
3131 expression->AsLiteral()->handle()->IsNumber() ||
3132 expression->AsLiteral()->handle()->IsString() ||
3133 expression->AsLiteral()->handle()->IsJSRegExp() ||
3134 expression->AsLiteral()->IsNull())) {
3135 // Omit evaluating the value of the primitive literal.
3136 // It will be discarded anyway, and can have no side effect.
3137 frame_->Push(Factory::undefined_value());
3138 } else {
3139 Load(node->expression());
3140 frame_->SetElementAt(0, Factory::undefined_value());
3141 }
3142
3143 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00003144 bool overwrite =
3145 (node->expression()->AsBinaryOperation() != NULL &&
3146 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
Steve Blocka7e24c12009-10-30 11:49:00 +00003147 Load(node->expression());
3148 switch (op) {
3149 case Token::NOT:
3150 case Token::DELETE:
3151 case Token::TYPEOF:
3152 UNREACHABLE(); // handled above
3153 break;
3154
3155 case Token::SUB: {
Leon Clarkee46be812010-01-19 14:06:41 +00003156 GenericUnaryOpStub stub(Token::SUB, overwrite);
Steve Blocka7e24c12009-10-30 11:49:00 +00003157 Result operand = frame_->Pop();
3158 Result answer = frame_->CallStub(&stub, &operand);
Steve Block6ded16b2010-05-10 14:33:55 +01003159 answer.set_type_info(TypeInfo::Number());
Steve Blocka7e24c12009-10-30 11:49:00 +00003160 frame_->Push(&answer);
3161 break;
3162 }
3163
3164 case Token::BIT_NOT: {
3165 // Smi check.
3166 JumpTarget smi_label;
3167 JumpTarget continue_label;
3168 Result operand = frame_->Pop();
3169 operand.ToRegister();
3170
3171 Condition is_smi = masm_->CheckSmi(operand.reg());
3172 smi_label.Branch(is_smi, &operand);
3173
Leon Clarked91b9f72010-01-27 17:25:45 +00003174 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
3175 Result answer = frame_->CallStub(&stub, &operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003176 continue_label.Jump(&answer);
Leon Clarked91b9f72010-01-27 17:25:45 +00003177
Steve Blocka7e24c12009-10-30 11:49:00 +00003178 smi_label.Bind(&answer);
3179 answer.ToRegister();
3180 frame_->Spill(answer.reg());
3181 __ SmiNot(answer.reg(), answer.reg());
3182 continue_label.Bind(&answer);
Steve Block6ded16b2010-05-10 14:33:55 +01003183 answer.set_type_info(TypeInfo::Smi());
Steve Blocka7e24c12009-10-30 11:49:00 +00003184 frame_->Push(&answer);
3185 break;
3186 }
3187
3188 case Token::ADD: {
3189 // Smi check.
3190 JumpTarget continue_label;
3191 Result operand = frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01003192 TypeInfo operand_info = operand.type_info();
Steve Blocka7e24c12009-10-30 11:49:00 +00003193 operand.ToRegister();
3194 Condition is_smi = masm_->CheckSmi(operand.reg());
3195 continue_label.Branch(is_smi, &operand);
3196 frame_->Push(&operand);
3197 Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
3198 CALL_FUNCTION, 1);
3199
3200 continue_label.Bind(&answer);
Steve Block6ded16b2010-05-10 14:33:55 +01003201 if (operand_info.IsSmi()) {
3202 answer.set_type_info(TypeInfo::Smi());
3203 } else if (operand_info.IsInteger32()) {
3204 answer.set_type_info(TypeInfo::Integer32());
3205 } else {
3206 answer.set_type_info(TypeInfo::Number());
3207 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003208 frame_->Push(&answer);
3209 break;
3210 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003211 default:
3212 UNREACHABLE();
3213 }
3214 }
3215}
3216
3217
Steve Block6ded16b2010-05-10 14:33:55 +01003218// The value in dst was optimistically incremented or decremented.
3219// The result overflowed or was not smi tagged. Call into the runtime
3220// to convert the argument to a number, and call the specialized add
3221// or subtract stub. The result is left in dst.
Steve Blocka7e24c12009-10-30 11:49:00 +00003222class DeferredPrefixCountOperation: public DeferredCode {
3223 public:
Steve Block6ded16b2010-05-10 14:33:55 +01003224 DeferredPrefixCountOperation(Register dst,
3225 bool is_increment,
3226 TypeInfo input_type)
3227 : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003228 set_comment("[ DeferredCountOperation");
3229 }
3230
3231 virtual void Generate();
3232
3233 private:
3234 Register dst_;
3235 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01003236 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003237};
3238
3239
3240void DeferredPrefixCountOperation::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01003241 Register left;
3242 if (input_type_.IsNumber()) {
3243 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003244 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003245 __ push(dst_);
3246 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3247 left = rax;
Steve Blocka7e24c12009-10-30 11:49:00 +00003248 }
Steve Block6ded16b2010-05-10 14:33:55 +01003249
3250 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
3251 NO_OVERWRITE,
3252 NO_GENERIC_BINARY_FLAGS,
3253 TypeInfo::Number());
3254 stub.GenerateCall(masm_, left, Smi::FromInt(1));
3255
Steve Blocka7e24c12009-10-30 11:49:00 +00003256 if (!dst_.is(rax)) __ movq(dst_, rax);
3257}
3258
3259
Steve Block6ded16b2010-05-10 14:33:55 +01003260// The value in dst was optimistically incremented or decremented.
3261// The result overflowed or was not smi tagged. Call into the runtime
3262// to convert the argument to a number. Update the original value in
3263// old. Call the specialized add or subtract stub. The result is
3264// left in dst.
Steve Blocka7e24c12009-10-30 11:49:00 +00003265class DeferredPostfixCountOperation: public DeferredCode {
3266 public:
Steve Block6ded16b2010-05-10 14:33:55 +01003267 DeferredPostfixCountOperation(Register dst,
3268 Register old,
3269 bool is_increment,
3270 TypeInfo input_type)
3271 : dst_(dst),
3272 old_(old),
3273 is_increment_(is_increment),
3274 input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003275 set_comment("[ DeferredCountOperation");
3276 }
3277
3278 virtual void Generate();
3279
3280 private:
3281 Register dst_;
3282 Register old_;
3283 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01003284 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003285};
3286
3287
3288void DeferredPostfixCountOperation::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01003289 Register left;
3290 if (input_type_.IsNumber()) {
3291 __ push(dst_); // Save the input to use as the old value.
3292 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003293 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003294 __ push(dst_);
3295 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3296 __ push(rax); // Save the result of ToNumber to use as the old value.
3297 left = rax;
Steve Blocka7e24c12009-10-30 11:49:00 +00003298 }
Steve Block6ded16b2010-05-10 14:33:55 +01003299
3300 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
3301 NO_OVERWRITE,
3302 NO_GENERIC_BINARY_FLAGS,
3303 TypeInfo::Number());
3304 stub.GenerateCall(masm_, left, Smi::FromInt(1));
3305
Steve Blocka7e24c12009-10-30 11:49:00 +00003306 if (!dst_.is(rax)) __ movq(dst_, rax);
3307 __ pop(old_);
3308}
3309
3310
3311void CodeGenerator::VisitCountOperation(CountOperation* node) {
3312 Comment cmnt(masm_, "[ CountOperation");
3313
3314 bool is_postfix = node->is_postfix();
3315 bool is_increment = node->op() == Token::INC;
3316
3317 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
3318 bool is_const = (var != NULL && var->mode() == Variable::CONST);
3319
3320 // Postfix operations need a stack slot under the reference to hold
3321 // the old value while the new value is being stored. This is so that
3322 // in the case that storing the new value requires a call, the old
3323 // value will be in the frame to be spilled.
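  // For a postfix expression such as 'a.x++' this slot receives the original
  // value of a.x, which is the value of the whole expression.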
3324 if (is_postfix) frame_->Push(Smi::FromInt(0));
3325
Leon Clarked91b9f72010-01-27 17:25:45 +00003326 // A constant reference is not saved to, so the reference is not a
3327 // compound assignment reference.
3328 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00003329 if (target.is_illegal()) {
3330 // Spoof the virtual frame to have the expected height (one higher
3331 // than on entry).
3332 if (!is_postfix) frame_->Push(Smi::FromInt(0));
3333 return;
3334 }
Steve Blockd0582a62009-12-15 09:54:21 +00003335 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003336
3337 Result new_value = frame_->Pop();
3338 new_value.ToRegister();
3339
3340 Result old_value; // Only allocated in the postfix case.
3341 if (is_postfix) {
3342 // Allocate a temporary to preserve the old value.
3343 old_value = allocator_->Allocate();
3344 ASSERT(old_value.is_valid());
3345 __ movq(old_value.reg(), new_value.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01003346
3347 // The return value for postfix operations is ToNumber(input).
3348 // Keep more precise type info if the input is some kind of
3349 // number already. If the input is not a number we have to wait
3350 // for the deferred code to convert it.
3351 if (new_value.type_info().IsNumber()) {
3352 old_value.set_type_info(new_value.type_info());
3353 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003354 }
3355 // Ensure the new value is writable.
3356 frame_->Spill(new_value.reg());
3357
3358 DeferredCode* deferred = NULL;
3359 if (is_postfix) {
3360 deferred = new DeferredPostfixCountOperation(new_value.reg(),
3361 old_value.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01003362 is_increment,
3363 new_value.type_info());
Steve Blocka7e24c12009-10-30 11:49:00 +00003364 } else {
3365 deferred = new DeferredPrefixCountOperation(new_value.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01003366 is_increment,
3367 new_value.type_info());
Steve Blocka7e24c12009-10-30 11:49:00 +00003368 }
3369
Steve Block3ce2e202009-11-05 08:53:23 +00003370 __ JumpIfNotSmi(new_value.reg(), deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003371 if (is_increment) {
Steve Block3ce2e202009-11-05 08:53:23 +00003372 __ SmiAddConstant(kScratchRegister,
3373 new_value.reg(),
3374 Smi::FromInt(1),
3375 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003376 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00003377 __ SmiSubConstant(kScratchRegister,
3378 new_value.reg(),
3379 Smi::FromInt(1),
3380 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003381 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003382 __ movq(new_value.reg(), kScratchRegister);
3383 deferred->BindExit();
3384
Steve Block6ded16b2010-05-10 14:33:55 +01003385 // Postfix count operations return their input converted to
3386 // number. The case when the input is already a number is covered
3387 // above in the allocation code for old_value.
3388 if (is_postfix && !new_value.type_info().IsNumber()) {
3389 old_value.set_type_info(TypeInfo::Number());
3390 }
3391
3392 new_value.set_type_info(TypeInfo::Number());
3393
Steve Blocka7e24c12009-10-30 11:49:00 +00003394 // Postfix: store the old value in the allocated slot under the
3395 // reference.
3396 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
3397
3398 frame_->Push(&new_value);
3399 // Non-constant: update the reference.
3400 if (!is_const) target.SetValue(NOT_CONST_INIT);
3401 }
3402
3403 // Postfix: drop the new value and use the old.
3404 if (is_postfix) frame_->Drop();
3405}
3406
3407
Steve Block6ded16b2010-05-10 14:33:55 +01003408void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003409 // According to ECMA-262 section 11.11, page 58, the binary logical
3410 // operators must yield the result of one of the two expressions
3411 // before any ToBoolean() conversions. This means that the value
3412 // produced by a && or || operator is not necessarily a boolean.
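  // For example, ('x' && 0) evaluates to 0 and (0 || 'x') evaluates to 'x';
  // neither result is a boolean.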
3413
3414 // NOTE: If the left hand side produces a materialized value (not
3415 // control flow), we force the right hand side to do the same. This
3416 // is necessary because we assume that if we get control flow on the
3417 // last path out of an expression we got it on all paths.
Steve Block6ded16b2010-05-10 14:33:55 +01003418 if (node->op() == Token::AND) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003419 JumpTarget is_true;
3420 ControlDestination dest(&is_true, destination()->false_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00003421 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003422
3423 if (dest.false_was_fall_through()) {
3424 // The current false target was used as the fall-through. If
3425 // there are no dangling jumps to is_true then the left
3426 // subexpression was unconditionally false. Otherwise we have
3427 // paths where we do have to evaluate the right subexpression.
3428 if (is_true.is_linked()) {
3429 // We need to compile the right subexpression. If the jump to
3430 // the current false target was a forward jump then we have a
3431 // valid frame, we have just bound the false target, and we
3432 // have to jump around the code for the right subexpression.
3433 if (has_valid_frame()) {
3434 destination()->false_target()->Unuse();
3435 destination()->false_target()->Jump();
3436 }
3437 is_true.Bind();
3438 // The left subexpression compiled to control flow, so the
3439 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003440 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003441 } else {
3442 // We have actually just jumped to or bound the current false
3443 // target but the current control destination is not marked as
3444 // used.
3445 destination()->Use(false);
3446 }
3447
3448 } else if (dest.is_used()) {
3449 // The left subexpression compiled to control flow (and is_true
3450 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003451 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003452
3453 } else {
3454 // We have a materialized value on the frame, so we exit with
3455 // one on all paths. There are possibly also jumps to is_true
3456 // from nested subexpressions.
3457 JumpTarget pop_and_continue;
3458 JumpTarget exit;
3459
3460 // Avoid popping the result if it converts to 'false' using the
3461 // standard ToBoolean() conversion as described in ECMA-262,
3462 // section 9.2, page 30.
3463 //
3464 // Duplicate the TOS value. The duplicate will be popped by
3465 // ToBoolean.
3466 frame_->Dup();
3467 ControlDestination dest(&pop_and_continue, &exit, true);
3468 ToBoolean(&dest);
3469
3470 // Pop the result of evaluating the first part.
3471 frame_->Drop();
3472
3473 // Compile right side expression.
3474 is_true.Bind();
3475 Load(node->right());
3476
3477 // Exit (always with a materialized value).
3478 exit.Bind();
3479 }
3480
Steve Block6ded16b2010-05-10 14:33:55 +01003481 } else {
3482 ASSERT(node->op() == Token::OR);
Steve Blocka7e24c12009-10-30 11:49:00 +00003483 JumpTarget is_false;
3484 ControlDestination dest(destination()->true_target(), &is_false, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003485 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003486
3487 if (dest.true_was_fall_through()) {
3488 // The current true target was used as the fall-through. If
3489 // there are no dangling jumps to is_false then the left
3490 // subexpression was unconditionally true. Otherwise we have
3491 // paths where we do have to evaluate the right subexpression.
3492 if (is_false.is_linked()) {
3493 // We need to compile the right subexpression. If the jump to
3494 // the current true target was a forward jump then we have a
3495 // valid frame, we have just bound the true target, and we
3496 // have to jump around the code for the right subexpression.
3497 if (has_valid_frame()) {
3498 destination()->true_target()->Unuse();
3499 destination()->true_target()->Jump();
3500 }
3501 is_false.Bind();
3502 // The left subexpression compiled to control flow, so the
3503 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003504 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003505 } else {
3506 // We have just jumped to or bound the current true target but
3507 // the current control destination is not marked as used.
3508 destination()->Use(true);
3509 }
3510
3511 } else if (dest.is_used()) {
3512 // The left subexpression compiled to control flow (and is_false
3513 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003514 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003515
3516 } else {
3517 // We have a materialized value on the frame, so we exit with
3518 // one on all paths. There are possibly also jumps to is_false
3519 // from nested subexpressions.
3520 JumpTarget pop_and_continue;
3521 JumpTarget exit;
3522
3523 // Avoid popping the result if it converts to 'true' using the
3524 // standard ToBoolean() conversion as described in ECMA-262,
3525 // section 9.2, page 30.
3526 //
3527 // Duplicate the TOS value. The duplicate will be popped by
3528 // ToBoolean.
3529 frame_->Dup();
3530 ControlDestination dest(&exit, &pop_and_continue, false);
3531 ToBoolean(&dest);
3532
3533 // Pop the result of evaluating the first part.
3534 frame_->Drop();
3535
3536 // Compile right side expression.
3537 is_false.Bind();
3538 Load(node->right());
3539
3540 // Exit (always with a materialized value).
3541 exit.Bind();
3542 }
Steve Block6ded16b2010-05-10 14:33:55 +01003543 }
3544}
Steve Blocka7e24c12009-10-30 11:49:00 +00003545
Steve Block6ded16b2010-05-10 14:33:55 +01003546void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
3547 Comment cmnt(masm_, "[ BinaryOperation");
3548
3549 if (node->op() == Token::AND || node->op() == Token::OR) {
3550 GenerateLogicalBooleanOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00003551 } else {
3552 // NOTE: The code below assumes that the slow cases (calls to runtime)
3553 // never return a constant/immutable object.
3554 OverwriteMode overwrite_mode = NO_OVERWRITE;
3555 if (node->left()->AsBinaryOperation() != NULL &&
3556 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
3557 overwrite_mode = OVERWRITE_LEFT;
3558 } else if (node->right()->AsBinaryOperation() != NULL &&
3559 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
3560 overwrite_mode = OVERWRITE_RIGHT;
3561 }
3562
Steve Block6ded16b2010-05-10 14:33:55 +01003563 if (node->left()->IsTrivial()) {
3564 Load(node->right());
3565 Result right = frame_->Pop();
3566 frame_->Push(node->left());
3567 frame_->Push(&right);
3568 } else {
3569 Load(node->left());
3570 Load(node->right());
3571 }
3572 GenericBinaryOperation(node, overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00003573 }
3574}
3575
3576
3577
3578void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
3579 Comment cmnt(masm_, "[ CompareOperation");
3580
3581 // Get the expressions from the node.
3582 Expression* left = node->left();
3583 Expression* right = node->right();
3584 Token::Value op = node->op();
3585 // To make typeof testing for natives implemented in JavaScript really
3586 // efficient, we generate special code for expressions of the form:
3587 // 'typeof <expression> == <string>'.
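  // For example, 'typeof x == "number"' is compiled into a smi check and a
  // heap number map check on x rather than materializing the typeof result
  // as a string and comparing it.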
3588 UnaryOperation* operation = left->AsUnaryOperation();
3589 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
3590 (operation != NULL && operation->op() == Token::TYPEOF) &&
3591 (right->AsLiteral() != NULL &&
3592 right->AsLiteral()->handle()->IsString())) {
3593 Handle<String> check(Handle<String>::cast(right->AsLiteral()->handle()));
3594
3595 // Load the operand and move it to a register.
3596 LoadTypeofExpression(operation->expression());
3597 Result answer = frame_->Pop();
3598 answer.ToRegister();
3599
3600 if (check->Equals(Heap::number_symbol())) {
3601 Condition is_smi = masm_->CheckSmi(answer.reg());
3602 destination()->true_target()->Branch(is_smi);
3603 frame_->Spill(answer.reg());
3604 __ movq(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
3605 __ CompareRoot(answer.reg(), Heap::kHeapNumberMapRootIndex);
3606 answer.Unuse();
3607 destination()->Split(equal);
3608
3609 } else if (check->Equals(Heap::string_symbol())) {
3610 Condition is_smi = masm_->CheckSmi(answer.reg());
3611 destination()->false_target()->Branch(is_smi);
3612
3613 // It can be an undetectable string object.
3614 __ movq(kScratchRegister,
3615 FieldOperand(answer.reg(), HeapObject::kMapOffset));
3616 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3617 Immediate(1 << Map::kIsUndetectable));
3618 destination()->false_target()->Branch(not_zero);
3619 __ CmpInstanceType(kScratchRegister, FIRST_NONSTRING_TYPE);
3620 answer.Unuse();
3621 destination()->Split(below); // Unsigned byte comparison needed.
3622
3623 } else if (check->Equals(Heap::boolean_symbol())) {
3624 __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
3625 destination()->true_target()->Branch(equal);
3626 __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
3627 answer.Unuse();
3628 destination()->Split(equal);
3629
3630 } else if (check->Equals(Heap::undefined_symbol())) {
3631 __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
3632 destination()->true_target()->Branch(equal);
3633
3634 Condition is_smi = masm_->CheckSmi(answer.reg());
3635 destination()->false_target()->Branch(is_smi);
3636
3637 // It can be an undetectable object.
3638 __ movq(kScratchRegister,
3639 FieldOperand(answer.reg(), HeapObject::kMapOffset));
3640 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3641 Immediate(1 << Map::kIsUndetectable));
3642 answer.Unuse();
3643 destination()->Split(not_zero);
3644
3645 } else if (check->Equals(Heap::function_symbol())) {
3646 Condition is_smi = masm_->CheckSmi(answer.reg());
3647 destination()->false_target()->Branch(is_smi);
3648 frame_->Spill(answer.reg());
3649 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
Steve Blockd0582a62009-12-15 09:54:21 +00003650 destination()->true_target()->Branch(equal);
3651 // Regular expressions are callable so typeof == 'function'.
3652 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00003653 answer.Unuse();
3654 destination()->Split(equal);
3655
3656 } else if (check->Equals(Heap::object_symbol())) {
3657 Condition is_smi = masm_->CheckSmi(answer.reg());
3658 destination()->false_target()->Branch(is_smi);
3659 __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
3660 destination()->true_target()->Branch(equal);
3661
Steve Blockd0582a62009-12-15 09:54:21 +00003662 // Regular expressions are typeof == 'function', not 'object'.
3663 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, kScratchRegister);
3664 destination()->false_target()->Branch(equal);
3665
Steve Blocka7e24c12009-10-30 11:49:00 +00003666 // It can be an undetectable object.
Steve Blocka7e24c12009-10-30 11:49:00 +00003667 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3668 Immediate(1 << Map::kIsUndetectable));
3669 destination()->false_target()->Branch(not_zero);
3670 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
3671 destination()->false_target()->Branch(below);
3672 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3673 answer.Unuse();
3674 destination()->Split(below_equal);
3675 } else {
3676 // Uncommon case: typeof testing against a string literal that is
3677 // never returned from the typeof operator.
3678 answer.Unuse();
3679 destination()->Goto(false);
3680 }
3681 return;
3682 }
3683
3684 Condition cc = no_condition;
3685 bool strict = false;
3686 switch (op) {
3687 case Token::EQ_STRICT:
3688 strict = true;
3689 // Fall through
3690 case Token::EQ:
3691 cc = equal;
3692 break;
3693 case Token::LT:
3694 cc = less;
3695 break;
3696 case Token::GT:
3697 cc = greater;
3698 break;
3699 case Token::LTE:
3700 cc = less_equal;
3701 break;
3702 case Token::GTE:
3703 cc = greater_equal;
3704 break;
3705 case Token::IN: {
3706 Load(left);
3707 Load(right);
3708 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
3709 frame_->Push(&answer); // push the result
3710 return;
3711 }
3712 case Token::INSTANCEOF: {
3713 Load(left);
3714 Load(right);
3715 InstanceofStub stub;
3716 Result answer = frame_->CallStub(&stub, 2);
3717 answer.ToRegister();
3718 __ testq(answer.reg(), answer.reg());
3719 answer.Unuse();
3720 destination()->Split(zero);
3721 return;
3722 }
3723 default:
3724 UNREACHABLE();
3725 }
3726 Load(left);
3727 Load(right);
Andrei Popescu402d9372010-02-26 13:31:12 +00003728 Comparison(node, cc, strict, destination());
Steve Blocka7e24c12009-10-30 11:49:00 +00003729}
3730
3731
3732void CodeGenerator::VisitThisFunction(ThisFunction* node) {
3733 frame_->PushFunction();
3734}
3735
3736
Steve Block6ded16b2010-05-10 14:33:55 +01003737void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003738 ASSERT(args->length() == 1);
3739
3740 // ArgumentsAccessStub expects the key in rdx and the formal
3741 // parameter count in rax.
3742 Load(args->at(0));
3743 Result key = frame_->Pop();
3744 // Explicitly create a constant result.
Andrei Popescu31002712010-02-23 13:46:05 +00003745 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00003746 // Call the shared stub to get to arguments[key].
3747 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3748 Result result = frame_->CallStub(&stub, &key, &count);
3749 frame_->Push(&result);
3750}
3751
3752
3753void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
3754 ASSERT(args->length() == 1);
3755 Load(args->at(0));
3756 Result value = frame_->Pop();
3757 value.ToRegister();
3758 ASSERT(value.is_valid());
3759 Condition is_smi = masm_->CheckSmi(value.reg());
3760 destination()->false_target()->Branch(is_smi);
3761 // It is a heap object - get map.
3762 // Check if the object is a JS array or not.
3763 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, kScratchRegister);
3764 value.Unuse();
3765 destination()->Split(equal);
3766}
3767
3768
Andrei Popescu402d9372010-02-26 13:31:12 +00003769void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
3770 ASSERT(args->length() == 1);
3771 Load(args->at(0));
3772 Result value = frame_->Pop();
3773 value.ToRegister();
3774 ASSERT(value.is_valid());
3775 Condition is_smi = masm_->CheckSmi(value.reg());
3776 destination()->false_target()->Branch(is_smi);
3777 // It is a heap object - get map.
3778 // Check if the object is a regexp.
3779 __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, kScratchRegister);
3780 value.Unuse();
3781 destination()->Split(equal);
3782}
3783
3784
Steve Blockd0582a62009-12-15 09:54:21 +00003785void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
3786 // This generates a fast version of:
3787 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
3788 ASSERT(args->length() == 1);
3789 Load(args->at(0));
3790 Result obj = frame_->Pop();
3791 obj.ToRegister();
3792 Condition is_smi = masm_->CheckSmi(obj.reg());
3793 destination()->false_target()->Branch(is_smi);
3794
3795 __ Move(kScratchRegister, Factory::null_value());
3796 __ cmpq(obj.reg(), kScratchRegister);
3797 destination()->true_target()->Branch(equal);
3798
3799 __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset));
3800 // Undetectable objects behave like undefined when tested with typeof.
3801 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3802 Immediate(1 << Map::kIsUndetectable));
3803 destination()->false_target()->Branch(not_zero);
3804 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
3805 destination()->false_target()->Branch(less);
3806 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3807 obj.Unuse();
3808 destination()->Split(less_equal);
3809}
3810
3811
3812void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
3813 // This generates a fast version of:
3814 // (%_ClassOf(arg) === 'Function')
3815 ASSERT(args->length() == 1);
3816 Load(args->at(0));
3817 Result obj = frame_->Pop();
3818 obj.ToRegister();
3819 Condition is_smi = masm_->CheckSmi(obj.reg());
3820 destination()->false_target()->Branch(is_smi);
3821 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister);
3822 obj.Unuse();
3823 destination()->Split(equal);
3824}
3825
3826
Leon Clarked91b9f72010-01-27 17:25:45 +00003827void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
3828 ASSERT(args->length() == 1);
3829 Load(args->at(0));
3830 Result obj = frame_->Pop();
3831 obj.ToRegister();
3832 Condition is_smi = masm_->CheckSmi(obj.reg());
3833 destination()->false_target()->Branch(is_smi);
3834 __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset));
3835 __ movzxbl(kScratchRegister,
3836 FieldOperand(kScratchRegister, Map::kBitFieldOffset));
3837 __ testl(kScratchRegister, Immediate(1 << Map::kIsUndetectable));
3838 obj.Unuse();
3839 destination()->Split(not_zero);
3840}
3841
3842
Steve Blocka7e24c12009-10-30 11:49:00 +00003843void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
3844 ASSERT(args->length() == 0);
3845
3846 // Get the frame pointer for the calling frame.
3847 Result fp = allocator()->Allocate();
3848 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3849
3850 // Skip the arguments adaptor frame if it exists.
3851 Label check_frame_marker;
Steve Block3ce2e202009-11-05 08:53:23 +00003852 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
3853 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00003854 __ j(not_equal, &check_frame_marker);
3855 __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
3856
3857 // Check the marker in the calling frame.
3858 __ bind(&check_frame_marker);
Steve Block3ce2e202009-11-05 08:53:23 +00003859 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
3860 Smi::FromInt(StackFrame::CONSTRUCT));
Steve Blocka7e24c12009-10-30 11:49:00 +00003861 fp.Unuse();
3862 destination()->Split(equal);
3863}
3864
3865
3866void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
3867 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01003868
3869 Result fp = allocator_->Allocate();
3870 Result result = allocator_->Allocate();
3871 ASSERT(fp.is_valid() && result.is_valid());
3872
3873 Label exit;
3874
3875 // Get the number of formal parameters.
3876 __ Move(result.reg(), Smi::FromInt(scope()->num_parameters()));
3877
3878 // Check if the calling frame is an arguments adaptor frame.
3879 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3880 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
3881 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3882 __ j(not_equal, &exit);
3883
3884 // Arguments adaptor case: Read the arguments length from the
3885 // adaptor frame.
3886 __ movq(result.reg(),
3887 Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));
3888
3889 __ bind(&exit);
3890 result.set_type_info(TypeInfo::Smi());
3891 if (FLAG_debug_code) {
3892 __ AbortIfNotSmi(result.reg(), "Computed arguments.length is not a smi.");
3893 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003894 frame_->Push(&result);
3895}
3896
3897
3898void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
3899 Comment(masm_, "[ GenerateFastCharCodeAt");
3900 ASSERT(args->length() == 2);
3901
Steve Blocka7e24c12009-10-30 11:49:00 +00003902 Load(args->at(0));
3903 Load(args->at(1));
3904 Result index = frame_->Pop();
3905 Result object = frame_->Pop();
3906
Steve Blocka7e24c12009-10-30 11:49:00 +00003907 // We will mutate the index register and possibly the object register.
3908 // The case where they are somehow the same register is handled
3909 // because we only mutate them in the case where the receiver is a
3910 // heap object and the index is not.
3911 object.ToRegister();
3912 index.ToRegister();
3913 frame_->Spill(object.reg());
3914 frame_->Spill(index.reg());
3915
Steve Block6ded16b2010-05-10 14:33:55 +01003916 // We need two extra registers.
3917 Result result = allocator()->Allocate();
3918 ASSERT(result.is_valid());
3919 Result scratch = allocator()->Allocate();
3920 ASSERT(scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00003921
3922 // There is no virtual frame effect from here up to the final result
3923 // push.
Steve Block6ded16b2010-05-10 14:33:55 +01003924 Label slow_case;
3925 Label exit;
3926 StringHelper::GenerateFastCharCodeAt(masm_,
3927 object.reg(),
3928 index.reg(),
3929 scratch.reg(),
3930 result.reg(),
3931 &slow_case,
3932 &slow_case,
3933 &slow_case,
3934 &slow_case);
3935 __ jmp(&exit);
Steve Blocka7e24c12009-10-30 11:49:00 +00003936
3937 __ bind(&slow_case);
3938 // Move the undefined value into the result register, which will
3939 // trigger the slow case.
Steve Block6ded16b2010-05-10 14:33:55 +01003940 __ LoadRoot(result.reg(), Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00003941
Steve Block6ded16b2010-05-10 14:33:55 +01003942 __ bind(&exit);
3943 frame_->Push(&result);
3944}
3945
3946
3947void CodeGenerator::GenerateCharFromCode(ZoneList<Expression*>* args) {
3948 Comment(masm_, "[ GenerateCharFromCode");
3949 ASSERT(args->length() == 1);
3950
3951 Load(args->at(0));
3952
3953 Result code = frame_->Pop();
3954 code.ToRegister();
3955 ASSERT(code.is_valid());
3956
3957 // StringHelper::GenerateCharFromCode may do a runtime call.
3958 frame_->SpillAll();
3959
3960 Result result = allocator()->Allocate();
3961 ASSERT(result.is_valid());
3962 Result scratch = allocator()->Allocate();
3963 ASSERT(scratch.is_valid());
3964
3965 StringHelper::GenerateCharFromCode(masm_,
3966 code.reg(),
3967 result.reg(),
3968 scratch.reg(),
3969 CALL_FUNCTION);
3970 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00003971}
3972
3973
3974void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
3975 ASSERT(args->length() == 1);
3976 Load(args->at(0));
3977 Result value = frame_->Pop();
3978 value.ToRegister();
3979 ASSERT(value.is_valid());
3980 Condition positive_smi = masm_->CheckPositiveSmi(value.reg());
3981 value.Unuse();
3982 destination()->Split(positive_smi);
3983}
3984
3985
Steve Block6ded16b2010-05-10 14:33:55 +01003986// Generates the Math.pow method. Only handles special cases and
3987// branches to the runtime system for everything else. Please note
3988// that this function assumes that the callsite has executed ToNumber
3989// on both arguments.
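// The cases handled inline are a smi exponent (exponentiation by squaring)
// and the exponents 0.5 and -0.5 (computed via sqrtsd); everything else is
// handled by Runtime::kMath_pow_cfunction.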
3990void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
3991 ASSERT(args->length() == 2);
3992 Load(args->at(0));
3993 Load(args->at(1));
3994
3995 Label allocate_return;
3996 // Load the two operands while leaving the values on the frame.
3997 frame()->Dup();
3998 Result exponent = frame()->Pop();
3999 exponent.ToRegister();
4000 frame()->Spill(exponent.reg());
4001 frame()->PushElementAt(1);
4002 Result base = frame()->Pop();
4003 base.ToRegister();
4004 frame()->Spill(base.reg());
4005
4006 Result answer = allocator()->Allocate();
4007 ASSERT(answer.is_valid());
4008 ASSERT(!exponent.reg().is(base.reg()));
4009 JumpTarget call_runtime;
4010
4011 // Save 1 in xmm3 - we need this several times later on.
4012 __ movl(answer.reg(), Immediate(1));
4013 __ cvtlsi2sd(xmm3, answer.reg());
4014
4015 Label exponent_nonsmi;
4016 Label base_nonsmi;
4017 // If the exponent is a heap number go to that specific case.
4018 __ JumpIfNotSmi(exponent.reg(), &exponent_nonsmi);
4019 __ JumpIfNotSmi(base.reg(), &base_nonsmi);
4020
4021 // Optimized version when y is an integer.
4022 Label powi;
4023 __ SmiToInteger32(base.reg(), base.reg());
4024 __ cvtlsi2sd(xmm0, base.reg());
4025 __ jmp(&powi);
4026 // The exponent is a smi and the base is a heap number.
4027 __ bind(&base_nonsmi);
4028 __ CompareRoot(FieldOperand(base.reg(), HeapObject::kMapOffset),
4029 Heap::kHeapNumberMapRootIndex);
4030 call_runtime.Branch(not_equal);
4031
4032 __ movsd(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
4033
4034 // Optimized version of pow if y is an integer.
4035 __ bind(&powi);
4036 __ SmiToInteger32(exponent.reg(), exponent.reg());
4037
4038 // Save exponent in base as we need to check if exponent is negative later.
4039 // We know that base and exponent are in different registers.
4040 __ movl(base.reg(), exponent.reg());
4041
4042 // Get absolute value of exponent.
4043 Label no_neg;
4044 __ cmpl(exponent.reg(), Immediate(0));
4045 __ j(greater_equal, &no_neg);
4046 __ negl(exponent.reg());
4047 __ bind(&no_neg);
4048
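  // Exponentiation by squaring: each iteration shifts the low bit out of the
  // exponent, multiplies the accumulator (xmm1) by the current power of the
  // base (xmm0) if that bit was set, and then squares xmm0.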
4049 // Load xmm1 with 1.
4050 __ movsd(xmm1, xmm3);
4051 Label while_true;
4052 Label no_multiply;
4053
4054 __ bind(&while_true);
4055 __ shrl(exponent.reg(), Immediate(1));
4056 __ j(not_carry, &no_multiply);
4057 __ mulsd(xmm1, xmm0);
4058 __ bind(&no_multiply);
4059 __ testl(exponent.reg(), exponent.reg());
4060 __ mulsd(xmm0, xmm0);
4061 __ j(not_zero, &while_true);
4062
4063 // base.reg() holds the original exponent - if it is negative, return 1/result.
4064 __ testl(base.reg(), base.reg());
4065 __ j(positive, &allocate_return);
4066 // Special case if xmm1 has reached infinity.
4067 __ movl(answer.reg(), Immediate(0x7FB00000));
4068 __ movd(xmm0, answer.reg());
4069 __ cvtss2sd(xmm0, xmm0);
4070 __ ucomisd(xmm0, xmm1);
4071 call_runtime.Branch(equal);
4072 __ divsd(xmm3, xmm1);
4073 __ movsd(xmm1, xmm3);
4074 __ jmp(&allocate_return);
4075
4076 // The exponent (or both operands) is a heap number - from here on we
4077 // work on doubles.
4078 __ bind(&exponent_nonsmi);
4079 __ CompareRoot(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
4080 Heap::kHeapNumberMapRootIndex);
4081 call_runtime.Branch(not_equal);
4082 __ movsd(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
4083 // Test if exponent is nan.
4084 __ ucomisd(xmm1, xmm1);
4085 call_runtime.Branch(parity_even);
4086
4087 Label base_not_smi;
4088 Label handle_special_cases;
4089 __ JumpIfNotSmi(base.reg(), &base_not_smi);
4090 __ SmiToInteger32(base.reg(), base.reg());
4091 __ cvtlsi2sd(xmm0, base.reg());
4092 __ jmp(&handle_special_cases);
4093 __ bind(&base_not_smi);
4094 __ CompareRoot(FieldOperand(base.reg(), HeapObject::kMapOffset),
4095 Heap::kHeapNumberMapRootIndex);
4096 call_runtime.Branch(not_equal);
4097 __ movl(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
4098 __ andl(answer.reg(), Immediate(HeapNumber::kExponentMask));
4099 __ cmpl(answer.reg(), Immediate(HeapNumber::kExponentMask));
4100 // base is NaN or +/-Infinity
4101 call_runtime.Branch(greater_equal);
4102 __ movsd(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
4103
4104 // base is in xmm0 and exponent is in xmm1.
4105 __ bind(&handle_special_cases);
4106 Label not_minus_half;
4107 // Test for -0.5.
4108 // Load xmm2 with -0.5.
4109 __ movl(answer.reg(), Immediate(0xBF000000));
4110 __ movd(xmm2, answer.reg());
4111 __ cvtss2sd(xmm2, xmm2);
4112 // xmm2 now has -0.5.
4113 __ ucomisd(xmm2, xmm1);
4114 __ j(not_equal, &not_minus_half);
4115
4116 // Calculate the reciprocal of the square root.
4117 // Note that 1/sqrt(x) = sqrt(1/x).
4118 __ divsd(xmm3, xmm0);
4119 __ movsd(xmm1, xmm3);
4120 __ sqrtsd(xmm1, xmm1);
4121 __ jmp(&allocate_return);
4122
4123 // Test for 0.5.
4124 __ bind(&not_minus_half);
4125 // Load xmm2 with 0.5.
4126 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
4127 __ addsd(xmm2, xmm3);
4128 // xmm2 now has 0.5.
4129 __ comisd(xmm2, xmm1);
4130 call_runtime.Branch(not_equal);
4131
4132 // Calculates square root.
4133 __ movsd(xmm1, xmm0);
4134 __ sqrtsd(xmm1, xmm1);
4135
4136 JumpTarget done;
4137 Label failure, success;
4138 __ bind(&allocate_return);
4139 // Make a copy of the frame to enable us to handle allocation
4140 // failure after the JumpTarget jump.
4141 VirtualFrame* clone = new VirtualFrame(frame());
4142 __ AllocateHeapNumber(answer.reg(), exponent.reg(), &failure);
4143 __ movsd(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
4144 // Remove the two original values from the frame - we only need those
4145 // in the case where we branch to runtime.
4146 frame()->Drop(2);
4147 exponent.Unuse();
4148 base.Unuse();
4149 done.Jump(&answer);
4150 // Use the copy of the original frame as our current frame.
4151 RegisterFile empty_regs;
4152 SetFrame(clone, &empty_regs);
4153 // If we experience an allocation failure we branch to runtime.
4154 __ bind(&failure);
4155 call_runtime.Bind();
4156 answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);
4157
4158 done.Bind(&answer);
4159 frame()->Push(&answer);
4160}
4161
4162
4163// Generates the Math.sqrt method. Please note - this function assumes that
4164// the callsite has executed ToNumber on the argument.
4165void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
4166 ASSERT(args->length() == 1);
4167 Load(args->at(0));
4168
4169 // Leave original value on the frame if we need to call runtime.
4170 frame()->Dup();
4171 Result result = frame()->Pop();
4172 result.ToRegister();
4173 frame()->Spill(result.reg());
4174 Label runtime;
4175 Label non_smi;
4176 Label load_done;
4177 JumpTarget end;
4178
4179 __ JumpIfNotSmi(result.reg(), &non_smi);
4180 __ SmiToInteger32(result.reg(), result.reg());
4181 __ cvtlsi2sd(xmm0, result.reg());
4182 __ jmp(&load_done);
4183 __ bind(&non_smi);
4184 __ CompareRoot(FieldOperand(result.reg(), HeapObject::kMapOffset),
4185 Heap::kHeapNumberMapRootIndex);
4186 __ j(not_equal, &runtime);
4187 __ movsd(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
4188
4189 __ bind(&load_done);
4190 __ sqrtsd(xmm0, xmm0);
4191 // A copy of the virtual frame to allow us to go to runtime after the
4192 // JumpTarget jump.
4193 Result scratch = allocator()->Allocate();
4194 VirtualFrame* clone = new VirtualFrame(frame());
4195 __ AllocateHeapNumber(result.reg(), scratch.reg(), &runtime);
4196
4197 __ movsd(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
4198 frame()->Drop(1);
4199 scratch.Unuse();
4200 end.Jump(&result);
4201 // We only branch to runtime if we have an allocation error.
4202 // Use the copy of the original frame as our current frame.
4203 RegisterFile empty_regs;
4204 SetFrame(clone, &empty_regs);
4205 __ bind(&runtime);
4206 result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
4207
4208 end.Bind(&result);
4209 frame()->Push(&result);
4210}
4211
4212
Steve Blocka7e24c12009-10-30 11:49:00 +00004213void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
4214 ASSERT(args->length() == 1);
4215 Load(args->at(0));
4216 Result value = frame_->Pop();
4217 value.ToRegister();
4218 ASSERT(value.is_valid());
4219 Condition is_smi = masm_->CheckSmi(value.reg());
4220 value.Unuse();
4221 destination()->Split(is_smi);
4222}
4223
4224
4225void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
4226 // Conditionally generate a log call.
4227 // Args:
4228 // 0 (literal string): The type of logging (corresponds to the flags).
4229 // This is used to determine whether or not to generate the log call.
4230 // 1 (string): Format string. Access the string at argument index 2
4231 // with '%2s' (see Logger::LogRuntime for all the formats).
4232 // 2 (array): Arguments to the format string.
4233 ASSERT_EQ(args->length(), 3);
4234#ifdef ENABLE_LOGGING_AND_PROFILING
4235 if (ShouldGenerateLog(args->at(0))) {
4236 Load(args->at(1));
4237 Load(args->at(2));
4238 frame_->CallRuntime(Runtime::kLog, 2);
4239 }
4240#endif
4241 // Finally, we're expected to leave a value on the top of the stack.
4242 frame_->Push(Factory::undefined_value());
4243}
4244
4245
4246void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
4247 ASSERT(args->length() == 2);
4248
4249 // Load the two objects into registers and perform the comparison.
4250 Load(args->at(0));
4251 Load(args->at(1));
4252 Result right = frame_->Pop();
4253 Result left = frame_->Pop();
4254 right.ToRegister();
4255 left.ToRegister();
4256 __ cmpq(right.reg(), left.reg());
4257 right.Unuse();
4258 left.Unuse();
4259 destination()->Split(equal);
4260}
4261
4262
4263void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
4264 ASSERT(args->length() == 0);
4265 // The RBP value is aligned, so its low bit is zero and it reads as a smi
Steve Block3ce2e202009-11-05 08:53:23 +00004266 // to the GC (it is not a properly shifted smi, so do not treat it as one).
Steve Blocka7e24c12009-10-30 11:49:00 +00004267 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4268 Result rbp_as_smi = allocator_->Allocate();
4269 ASSERT(rbp_as_smi.is_valid());
4270 __ movq(rbp_as_smi.reg(), rbp);
4271 frame_->Push(&rbp_as_smi);
4272}
4273
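// Illustrative sketch (not used by the code generator) of the invariant the
// push above relies on: an aligned frame pointer has a zero low bit, which
// is exactly the smi tag the GC checks for, so the raw rbp value can be
// pushed without further tagging.
static inline bool CarriesSmiTag(intptr_t word) {
  return (word & kSmiTagMask) == 0;  // kSmiTag == 0, kSmiTagSize == 1.
}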
4274
Steve Block6ded16b2010-05-10 14:33:55 +01004275void CodeGenerator::GenerateRandomHeapNumber(
4276 ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004277 ASSERT(args->length() == 0);
4278 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00004279
Steve Block6ded16b2010-05-10 14:33:55 +01004280 Label slow_allocate_heapnumber;
4281 Label heapnumber_allocated;
4282 __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
4283 __ jmp(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00004284
Steve Block6ded16b2010-05-10 14:33:55 +01004285 __ bind(&slow_allocate_heapnumber);
4286 // To allocate a heap number, and ensure that it is not a smi, we
4287 // call the runtime function NumberUnaryMinus on 0, returning the double
4288 // -0.0. A new, distinct heap number is returned each time.
4289 __ Push(Smi::FromInt(0));
4290 __ CallRuntime(Runtime::kNumberUnaryMinus, 1);
4291 __ movq(rbx, rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00004292
Steve Block6ded16b2010-05-10 14:33:55 +01004293 __ bind(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00004294
Steve Block6ded16b2010-05-10 14:33:55 +01004295 // Return a random uint32 number in rax.
4296 // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
4297 __ PrepareCallCFunction(0);
4298 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
4299
4300 // Convert the 32 random bits in rax to 0.(32 random bits) in a double
4301 // by computing:
4302 // ( 1.(20 0s)(32 random bits) x 2^20 ) - ( 1.0 x 2^20 ).
4303 __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
4304 __ movd(xmm1, rcx);
4305 __ movd(xmm0, rax);
4306 __ cvtss2sd(xmm1, xmm1);
4307 __ xorpd(xmm0, xmm1);
4308 __ subsd(xmm0, xmm1);
4309 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
4310
4311 __ movq(rax, rbx);
Steve Blocka7e24c12009-10-30 11:49:00 +00004312 Result result = allocator_->Allocate(rax);
4313 frame_->Push(&result);
4314}
4315
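// Illustrative sketch (not part of the generated code) of what the
// movd/cvtss2sd/xorpd/subsd sequence above computes, written in plain C++.
// The 32 random bits become the low mantissa bits of a double of the form
// 1.x * 2^20; subtracting 1.0 * 2^20 leaves a value in [0, 1). The union
// type-pun is for illustration only.
static inline double RandomBitsToDouble(uint32_t random_bits) {
  union { uint64_t bits; double value; } pun;
  // 0x4130000000000000 is the IEEE-754 bit pattern of 1.0 * 2^20.
  pun.bits = (static_cast<uint64_t>(0x41300000) << 32) | random_bits;
  return pun.value - 1048576.0;  // 1048576.0 == 2^20.
}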
4316
Leon Clarkee46be812010-01-19 14:06:41 +00004317void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
4318 ASSERT_EQ(args->length(), 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00004319
Leon Clarkee46be812010-01-19 14:06:41 +00004320 // Load the arguments on the stack and call the runtime system.
Steve Blocka7e24c12009-10-30 11:49:00 +00004321 Load(args->at(0));
Leon Clarkee46be812010-01-19 14:06:41 +00004322 Load(args->at(1));
4323 Load(args->at(2));
4324 Load(args->at(3));
Leon Clarke4515c472010-02-03 11:58:03 +00004325 RegExpExecStub stub;
4326 Result result = frame_->CallStub(&stub, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00004327 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004328}
4329
4330
Steve Block6ded16b2010-05-10 14:33:55 +01004331void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
4332 // No stub. This code only occurs a few times in regexp.js.
4333 const int kMaxInlineLength = 100;
4334 ASSERT_EQ(3, args->length());
4335 Load(args->at(0)); // Size of array, smi.
4336 Load(args->at(1)); // "index" property value.
4337 Load(args->at(2)); // "input" property value.
4338 {
4339 VirtualFrame::SpilledScope spilled_scope;
4340
4341 Label slowcase;
4342 Label done;
4343 __ movq(r8, Operand(rsp, kPointerSize * 2));
4344 __ JumpIfNotSmi(r8, &slowcase);
4345 __ SmiToInteger32(rbx, r8);
4346 __ cmpl(rbx, Immediate(kMaxInlineLength));
4347 __ j(above, &slowcase);
4348 // Smi-tagging is equivalent to multiplying by 2.
4349 STATIC_ASSERT(kSmiTag == 0);
4350 STATIC_ASSERT(kSmiTagSize == 1);
4351 // Allocate RegExpResult followed by FixedArray with size in ebx.
4352 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
4353 // Elements: [Map][Length][..elements..]
4354 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
4355 times_pointer_size,
4356 rbx, // In: Number of elements.
4357 rax, // Out: Start of allocation (tagged).
4358 rcx, // Out: End of allocation.
4359 rdx, // Scratch register
4360 &slowcase,
4361 TAG_OBJECT);
4362 // rax: Start of allocated area, object-tagged.
4363 // rbx: Number of array elements as int32.
4364 // r8: Number of array elements as smi.
4365
4366 // Set JSArray map to global.regexp_result_map().
4367 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
4368 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
4369 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
4370 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
4371
4372 // Set empty properties FixedArray.
4373 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
4374 Factory::empty_fixed_array());
4375
4376 // Set elements to point to FixedArray allocated right after the JSArray.
4377 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
4378 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
4379
4380 // Set input, index and length fields from arguments.
4381 __ pop(FieldOperand(rax, JSRegExpResult::kInputOffset));
4382 __ pop(FieldOperand(rax, JSRegExpResult::kIndexOffset));
4383 __ lea(rsp, Operand(rsp, kPointerSize)); // Drop the size argument; it was read into r8 above.
4384 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
4385
4386 // Fill out the elements FixedArray.
4387 // rax: JSArray.
4388 // rcx: FixedArray.
4389 // rbx: Number of elements in array as int32.
4390
4391 // Set map.
4392 __ Move(FieldOperand(rcx, HeapObject::kMapOffset),
4393 Factory::fixed_array_map());
4394 // Set length.
4395 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rbx);
4396 // Fill contents of fixed-array with the-hole.
4397 __ Move(rdx, Factory::the_hole_value());
4398 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
4399 // Fill fixed array elements with hole.
4400 // rax: JSArray.
4401 // rbx: Number of elements in array that remains to be filled, as int32.
4402 // rcx: Start of elements in FixedArray.
4403 // rdx: the hole.
4404 Label loop;
4405 __ testl(rbx, rbx);
4406 __ bind(&loop);
4407 __ j(less_equal, &done); // Jump if rbx is negative or zero.
4408 __ subl(rbx, Immediate(1));
4409 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
4410 __ jmp(&loop);
4411
4412 __ bind(&slowcase);
4413 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
4414
4415 __ bind(&done);
4416 }
4417 frame_->Forget(3);
4418 frame_->Push(rax);
4419}
4420
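// Illustrative sketch (not used by the code generator) of the size that
// AllocateInNewSpace reserves above, assuming the layout described in the
// comments: a JSRegExpResult header, a FixedArray header, and one
// pointer-sized slot per element.
static inline int RegExpResultAllocationSize(int num_elements) {
  return JSRegExpResult::kSize + FixedArray::kHeaderSize +
         num_elements * kPointerSize;
}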
4421
4422class DeferredSearchCache: public DeferredCode {
4423 public:
4424 DeferredSearchCache(Register dst, Register cache, Register key)
4425 : dst_(dst), cache_(cache), key_(key) {
4426 set_comment("[ DeferredSearchCache");
4427 }
4428
4429 virtual void Generate();
4430
4431 private:
4432 Register dst_, cache_, key_;
4433};
4434
4435
4436void DeferredSearchCache::Generate() {
4437 __ push(cache_);
4438 __ push(key_);
4439 __ CallRuntime(Runtime::kGetFromCache, 2);
4440 if (!dst_.is(rax)) {
4441 __ movq(dst_, rax);
4442 }
4443}
4444
4445
4446void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
4447 ASSERT_EQ(2, args->length());
4448
4449 ASSERT_NE(NULL, args->at(0)->AsLiteral());
4450 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
4451
4452 Handle<FixedArray> jsfunction_result_caches(
4453 Top::global_context()->jsfunction_result_caches());
4454 if (jsfunction_result_caches->length() <= cache_id) {
4455 __ Abort("Attempt to use undefined cache.");
4456 frame_->Push(Factory::undefined_value());
4457 return;
4458 }
4459
4460 Load(args->at(1));
4461 Result key = frame_->Pop();
4462 key.ToRegister();
4463
4464 Result cache = allocator()->Allocate();
4465 ASSERT(cache.is_valid());
4466 __ movq(cache.reg(), ContextOperand(rsi, Context::GLOBAL_INDEX));
4467 __ movq(cache.reg(),
4468 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
4469 __ movq(cache.reg(),
4470 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
4471 __ movq(cache.reg(),
4472 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));
4473
4474 Result tmp = allocator()->Allocate();
4475 ASSERT(tmp.is_valid());
4476
4477 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
4478 cache.reg(),
4479 key.reg());
4480
4481 const int kFingerOffset =
4482 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
4483 // tmp.reg() now holds finger offset as a smi.
4484 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4485 __ movq(tmp.reg(), FieldOperand(cache.reg(), kFingerOffset));
4486 SmiIndex index =
4487 masm()->SmiToIndex(kScratchRegister, tmp.reg(), kPointerSizeLog2);
4488 __ cmpq(key.reg(), FieldOperand(cache.reg(),
4489 index.reg,
4490 index.scale,
4491 FixedArray::kHeaderSize));
4492 deferred->Branch(not_equal);
4493
4494 __ movq(tmp.reg(), FieldOperand(cache.reg(),
4495 index.reg,
4496 index.scale,
4497 kPointerSize + FixedArray::kHeaderSize));
4498
4499 deferred->BindExit();
4500 frame_->Push(&tmp);
4501}
4502
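// Illustrative sketch (not used by the code generator) of the fast path
// above: the cache is a FixedArray whose finger field holds the element
// index of the most recently used key, and entries are stored as
// (key, value) pairs. Only a hit at the finger is handled inline; every
// other case falls back to Runtime::kGetFromCache in the deferred code.
static inline Object* ProbeCacheAtFinger(FixedArray* cache, Object* key) {
  int finger =
      Smi::cast(cache->get(JSFunctionResultCache::kFingerIndex))->value();
  if (cache->get(finger) == key) return cache->get(finger + 1);
  return NULL;  // "Not found at the finger" in this sketch.
}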
4503
Andrei Popescu402d9372010-02-26 13:31:12 +00004504void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
4505 ASSERT_EQ(args->length(), 1);
4506
4507 // Load the argument on the stack and jump to the runtime.
4508 Load(args->at(0));
4509
Steve Block6ded16b2010-05-10 14:33:55 +01004510 NumberToStringStub stub;
4511 Result result = frame_->CallStub(&stub, 1);
4512 frame_->Push(&result);
4513}
4514
4515
4516class DeferredSwapElements: public DeferredCode {
4517 public:
4518 DeferredSwapElements(Register object, Register index1, Register index2)
4519 : object_(object), index1_(index1), index2_(index2) {
4520 set_comment("[ DeferredSwapElements");
4521 }
4522
4523 virtual void Generate();
4524
4525 private:
4526 Register object_, index1_, index2_;
4527};
4528
4529
4530void DeferredSwapElements::Generate() {
4531 __ push(object_);
4532 __ push(index1_);
4533 __ push(index2_);
4534 __ CallRuntime(Runtime::kSwapElements, 3);
4535}
4536
4537
4538void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
4539 Comment cmnt(masm_, "[ GenerateSwapElements");
4540
4541 ASSERT_EQ(3, args->length());
4542
4543 Load(args->at(0));
4544 Load(args->at(1));
4545 Load(args->at(2));
4546
4547 Result index2 = frame_->Pop();
4548 index2.ToRegister();
4549
4550 Result index1 = frame_->Pop();
4551 index1.ToRegister();
4552
4553 Result object = frame_->Pop();
4554 object.ToRegister();
4555
4556 Result tmp1 = allocator()->Allocate();
4557 tmp1.ToRegister();
4558 Result tmp2 = allocator()->Allocate();
4559 tmp2.ToRegister();
4560
4561 frame_->Spill(object.reg());
4562 frame_->Spill(index1.reg());
4563 frame_->Spill(index2.reg());
4564
4565 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
4566 index1.reg(),
4567 index2.reg());
4568
4569 // Fetch the map and check if array is in fast case.
4570 // Check that object doesn't require security checks and
4571 // has no indexed interceptor.
4572 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
4573 deferred->Branch(below);
4574 __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
4575 Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
4576 deferred->Branch(not_zero);
4577
4578 // Check the object's elements are in fast case.
4579 __ movq(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
4580 __ CompareRoot(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
4581 Heap::kFixedArrayMapRootIndex);
4582 deferred->Branch(not_equal);
4583
4584 // Check that both indices are smis.
4585 Condition both_smi = __ CheckBothSmi(index1.reg(), index2.reg());
4586 deferred->Branch(NegateCondition(both_smi));
4587
4588 // Bring addresses into index1 and index2.
4589 __ SmiToInteger32(index1.reg(), index1.reg());
4590 __ lea(index1.reg(), FieldOperand(tmp1.reg(),
4591 index1.reg(),
4592 times_pointer_size,
4593 FixedArray::kHeaderSize));
4594 __ SmiToInteger32(index2.reg(), index2.reg());
4595 __ lea(index2.reg(), FieldOperand(tmp1.reg(),
4596 index2.reg(),
4597 times_pointer_size,
4598 FixedArray::kHeaderSize));
4599
4600 // Swap elements.
4601 __ movq(object.reg(), Operand(index1.reg(), 0));
4602 __ movq(tmp2.reg(), Operand(index2.reg(), 0));
4603 __ movq(Operand(index2.reg(), 0), object.reg());
4604 __ movq(Operand(index1.reg(), 0), tmp2.reg());
4605
4606 Label done;
4607 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
4608 // Possible optimization: check that both values are smis
4609 // (OR them together and test against the smi tag mask).
4610
4611 __ movq(tmp2.reg(), tmp1.reg());
4612 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg());
4613 __ CallStub(&recordWrite1);
4614
4615 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg());
4616 __ CallStub(&recordWrite2);
4617
4618 __ bind(&done);
4619
4620 deferred->BindExit();
4621 frame_->Push(Factory::undefined_value());
4622}
4623
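// Illustrative sketch (not used by the code generator) of the optimization
// suggested in the comment above: OR the two tagged words and test the smi
// tag mask once; if both values are smis, no write barrier is needed.
static inline bool BothSmis(intptr_t a, intptr_t b) {
  return ((a | b) & kSmiTagMask) == 0;  // kSmiTag == 0.
}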
4624
4625void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
4626 Comment cmnt(masm_, "[ GenerateCallFunction");
4627
4628 ASSERT(args->length() >= 2);
4629
4630 int n_args = args->length() - 2; // for receiver and function.
4631 Load(args->at(0)); // receiver
4632 for (int i = 0; i < n_args; i++) {
4633 Load(args->at(i + 1));
4634 }
4635 Load(args->at(n_args + 1)); // function
4636 Result result = frame_->CallJSFunction(n_args);
4637 frame_->Push(&result);
Andrei Popescu402d9372010-02-26 13:31:12 +00004638}
4639
4640
4641void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
4642 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00004643 Load(args->at(0));
Steve Block6ded16b2010-05-10 14:33:55 +01004644 TranscendentalCacheStub stub(TranscendentalCache::SIN);
4645 Result result = frame_->CallStub(&stub, 1);
4646 frame_->Push(&result);
Andrei Popescu402d9372010-02-26 13:31:12 +00004647}
4648
4649
4650void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
4651 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00004652 Load(args->at(0));
Steve Block6ded16b2010-05-10 14:33:55 +01004653 TranscendentalCacheStub stub(TranscendentalCache::COS);
4654 Result result = frame_->CallStub(&stub, 1);
4655 frame_->Push(&result);
Andrei Popescu402d9372010-02-26 13:31:12 +00004656}
4657
4658
Steve Blockd0582a62009-12-15 09:54:21 +00004659void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
4660 ASSERT_EQ(2, args->length());
4661
4662 Load(args->at(0));
4663 Load(args->at(1));
4664
Leon Clarkee46be812010-01-19 14:06:41 +00004665 StringAddStub stub(NO_STRING_ADD_FLAGS);
4666 Result answer = frame_->CallStub(&stub, 2);
4667 frame_->Push(&answer);
4668}
4669
4670
4671void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
4672 ASSERT_EQ(3, args->length());
4673
4674 Load(args->at(0));
4675 Load(args->at(1));
4676 Load(args->at(2));
4677
Leon Clarked91b9f72010-01-27 17:25:45 +00004678 SubStringStub stub;
4679 Result answer = frame_->CallStub(&stub, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00004680 frame_->Push(&answer);
4681}
4682
4683
4684void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
4685 ASSERT_EQ(2, args->length());
4686
4687 Load(args->at(0));
4688 Load(args->at(1));
4689
4690 StringCompareStub stub;
4691 Result answer = frame_->CallStub(&stub, 2);
Steve Blockd0582a62009-12-15 09:54:21 +00004692 frame_->Push(&answer);
4693}
4694
4695
Steve Blocka7e24c12009-10-30 11:49:00 +00004696void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
4697 ASSERT(args->length() == 1);
4698 JumpTarget leave, null, function, non_function_constructor;
4699 Load(args->at(0)); // Load the object.
4700 Result obj = frame_->Pop();
4701 obj.ToRegister();
4702 frame_->Spill(obj.reg());
4703
4704 // If the object is a smi, we return null.
4705 Condition is_smi = masm_->CheckSmi(obj.reg());
4706 null.Branch(is_smi);
4707
4708 // Check that the object is a JS object but take special care of JS
4709 // functions to make sure they have 'Function' as their class.
4710
4711 __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
4712 null.Branch(below);
4713
4714 // As long as JS_FUNCTION_TYPE is the last instance type and it is
4715 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
4716 // LAST_JS_OBJECT_TYPE.
4717 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4718 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
4719 __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
4720 function.Branch(equal);
4721
4722 // Check if the constructor in the map is a function.
4723 __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
4724 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister);
4725 non_function_constructor.Branch(not_equal);
4726
4727 // The obj register now contains the constructor function. Grab the
4728 // instance class name from there.
4729 __ movq(obj.reg(),
4730 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
4731 __ movq(obj.reg(),
4732 FieldOperand(obj.reg(),
4733 SharedFunctionInfo::kInstanceClassNameOffset));
4734 frame_->Push(&obj);
4735 leave.Jump();
4736
4737 // Functions have class 'Function'.
4738 function.Bind();
4739 frame_->Push(Factory::function_class_symbol());
4740 leave.Jump();
4741
4742 // Objects with a non-function constructor have class 'Object'.
4743 non_function_constructor.Bind();
4744 frame_->Push(Factory::Object_symbol());
4745 leave.Jump();
4746
4747 // Non-JS objects have class null.
4748 null.Bind();
4749 frame_->Push(Factory::null_value());
4750
4751 // All done.
4752 leave.Bind();
4753}
4754
4755
4756void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
4757 ASSERT(args->length() == 2);
4758 JumpTarget leave;
4759 Load(args->at(0)); // Load the object.
4760 Load(args->at(1)); // Load the value.
4761 Result value = frame_->Pop();
4762 Result object = frame_->Pop();
4763 value.ToRegister();
4764 object.ToRegister();
4765
4766 // if (object->IsSmi()) return value.
4767 Condition is_smi = masm_->CheckSmi(object.reg());
4768 leave.Branch(is_smi, &value);
4769
4770 // It is a heap object - get its map.
4771 Result scratch = allocator_->Allocate();
4772 ASSERT(scratch.is_valid());
4773 // if (!object->IsJSValue()) return value.
4774 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
4775 leave.Branch(not_equal, &value);
4776
4777 // Store the value.
4778 __ movq(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
4779 // Update the write barrier. Save the value as it will be
4780 // overwritten by the write barrier code and is needed afterward.
4781 Result duplicate_value = allocator_->Allocate();
4782 ASSERT(duplicate_value.is_valid());
4783 __ movq(duplicate_value.reg(), value.reg());
4784 // The object register is also overwritten by the write barrier and
4785 // possibly aliased in the frame.
4786 frame_->Spill(object.reg());
4787 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
4788 scratch.reg());
4789 object.Unuse();
4790 scratch.Unuse();
4791 duplicate_value.Unuse();
4792
4793 // Leave.
4794 leave.Bind(&value);
4795 frame_->Push(&value);
4796}
4797
4798
4799void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
4800 ASSERT(args->length() == 1);
4801 JumpTarget leave;
4802 Load(args->at(0)); // Load the object.
4803 frame_->Dup();
4804 Result object = frame_->Pop();
4805 object.ToRegister();
4806 ASSERT(object.is_valid());
4807 // if (object->IsSmi()) return object.
4808 Condition is_smi = masm_->CheckSmi(object.reg());
4809 leave.Branch(is_smi);
4810 // It is a heap object - get map.
4811 Result temp = allocator()->Allocate();
4812 ASSERT(temp.is_valid());
4813 // if (!object->IsJSValue()) return object.
4814 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
4815 leave.Branch(not_equal);
4816 __ movq(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
4817 object.Unuse();
4818 frame_->SetElementAt(0, &temp);
4819 leave.Bind();
4820}
4821
4822
4823// -----------------------------------------------------------------------------
4824// CodeGenerator implementation of Expressions
4825
Steve Blockd0582a62009-12-15 09:54:21 +00004826void CodeGenerator::LoadAndSpill(Expression* expression) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004827 // TODO(x64): No architecture specific code. Move to shared location.
4828 ASSERT(in_spilled_code());
4829 set_in_spilled_code(false);
Steve Blockd0582a62009-12-15 09:54:21 +00004830 Load(expression);
Steve Blocka7e24c12009-10-30 11:49:00 +00004831 frame_->SpillAll();
4832 set_in_spilled_code(true);
4833}
4834
4835
Steve Blockd0582a62009-12-15 09:54:21 +00004836void CodeGenerator::Load(Expression* expr) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004837#ifdef DEBUG
4838 int original_height = frame_->height();
4839#endif
4840 ASSERT(!in_spilled_code());
4841 JumpTarget true_target;
4842 JumpTarget false_target;
4843 ControlDestination dest(&true_target, &false_target, true);
Steve Blockd0582a62009-12-15 09:54:21 +00004844 LoadCondition(expr, &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00004845
4846 if (dest.false_was_fall_through()) {
4847 // The false target was just bound.
4848 JumpTarget loaded;
4849 frame_->Push(Factory::false_value());
4850 // There may be dangling jumps to the true target.
4851 if (true_target.is_linked()) {
4852 loaded.Jump();
4853 true_target.Bind();
4854 frame_->Push(Factory::true_value());
4855 loaded.Bind();
4856 }
4857
4858 } else if (dest.is_used()) {
4859 // There is true, and possibly false, control flow (with true as
4860 // the fall through).
4861 JumpTarget loaded;
4862 frame_->Push(Factory::true_value());
4863 if (false_target.is_linked()) {
4864 loaded.Jump();
4865 false_target.Bind();
4866 frame_->Push(Factory::false_value());
4867 loaded.Bind();
4868 }
4869
4870 } else {
4871 // We have a valid value on top of the frame, but we still may
4872 // have dangling jumps to the true and false targets from nested
4873 // subexpressions (eg, the left subexpressions of the
4874 // short-circuited boolean operators).
4875 ASSERT(has_valid_frame());
4876 if (true_target.is_linked() || false_target.is_linked()) {
4877 JumpTarget loaded;
4878 loaded.Jump(); // Don't lose the current TOS.
4879 if (true_target.is_linked()) {
4880 true_target.Bind();
4881 frame_->Push(Factory::true_value());
4882 if (false_target.is_linked()) {
4883 loaded.Jump();
4884 }
4885 }
4886 if (false_target.is_linked()) {
4887 false_target.Bind();
4888 frame_->Push(Factory::false_value());
4889 }
4890 loaded.Bind();
4891 }
4892 }
4893
4894 ASSERT(has_valid_frame());
4895 ASSERT(frame_->height() == original_height + 1);
4896}
4897
4898
4899// Emit code to load the value of an expression to the top of the
4900// frame. If the expression is boolean-valued it may be compiled (or
4901// partially compiled) into control flow to the control destination.
4902// If force_control is true, control flow is forced.
4903void CodeGenerator::LoadCondition(Expression* x,
Steve Blocka7e24c12009-10-30 11:49:00 +00004904 ControlDestination* dest,
4905 bool force_control) {
4906 ASSERT(!in_spilled_code());
4907 int original_height = frame_->height();
4908
Steve Blockd0582a62009-12-15 09:54:21 +00004909 { CodeGenState new_state(this, dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00004910 Visit(x);
4911
4912 // If we hit a stack overflow, we may not have actually visited
4913 // the expression. In that case, we ensure that we have a
4914 // valid-looking frame state because we will continue to generate
4915 // code as we unwind the C++ stack.
4916 //
4917 // It's possible to have both a stack overflow and a valid frame
4918 // state (eg, a subexpression overflowed, visiting it returned
4919 // with a dummied frame state, and visiting this expression
4920 // returned with a normal-looking state).
4921 if (HasStackOverflow() &&
4922 !dest->is_used() &&
4923 frame_->height() == original_height) {
4924 dest->Goto(true);
4925 }
4926 }
4927
4928 if (force_control && !dest->is_used()) {
4929 // Convert the TOS value into flow to the control destination.
4930 // TODO(X64): Make control flow to control destinations work.
4931 ToBoolean(dest);
4932 }
4933
4934 ASSERT(!(force_control && !dest->is_used()));
4935 ASSERT(dest->is_used() || frame_->height() == original_height + 1);
4936}
4937
4938
Steve Blocka7e24c12009-10-30 11:49:00 +00004939// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
4940// convert it to a boolean in the condition code register or jump to
4941// 'false_target'/'true_target' as appropriate.
4942void CodeGenerator::ToBoolean(ControlDestination* dest) {
4943 Comment cmnt(masm_, "[ ToBoolean");
4944
4945 // The value to convert should be popped from the frame.
4946 Result value = frame_->Pop();
4947 value.ToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00004948
Andrei Popescu402d9372010-02-26 13:31:12 +00004949 if (value.is_number()) {
4950 Comment cmnt(masm_, "ONLY_NUMBER");
Steve Block6ded16b2010-05-10 14:33:55 +01004951 // Fast case if TypeInfo indicates only numbers.
Andrei Popescu402d9372010-02-26 13:31:12 +00004952 if (FLAG_debug_code) {
4953 __ AbortIfNotNumber(value.reg(), "ToBoolean operand is not a number.");
4954 }
4955 // Smi => false iff zero.
4956 __ SmiCompare(value.reg(), Smi::FromInt(0));
4957 dest->false_target()->Branch(equal);
4958 Condition is_smi = masm_->CheckSmi(value.reg());
4959 dest->true_target()->Branch(is_smi);
4960 __ fldz();
4961 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
4962 __ FCmp();
4963 value.Unuse();
4964 dest->Split(not_zero);
4965 } else {
4966 // Fast case checks.
4967 // 'false' => false.
4968 __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
4969 dest->false_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00004970
Andrei Popescu402d9372010-02-26 13:31:12 +00004971 // 'true' => true.
4972 __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
4973 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00004974
Andrei Popescu402d9372010-02-26 13:31:12 +00004975 // 'undefined' => false.
4976 __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
4977 dest->false_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00004978
Andrei Popescu402d9372010-02-26 13:31:12 +00004979 // Smi => false iff zero.
4980 __ SmiCompare(value.reg(), Smi::FromInt(0));
4981 dest->false_target()->Branch(equal);
4982 Condition is_smi = masm_->CheckSmi(value.reg());
4983 dest->true_target()->Branch(is_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00004984
Andrei Popescu402d9372010-02-26 13:31:12 +00004985 // Call the stub for all other cases.
4986 frame_->Push(&value); // Undo the Pop() from above.
4987 ToBooleanStub stub;
4988 Result temp = frame_->CallStub(&stub, 1);
4989 // Convert the result to a condition code.
4990 __ testq(temp.reg(), temp.reg());
4991 temp.Unuse();
4992 dest->Split(not_equal);
4993 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004994}
4995
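// Illustrative sketch (not used by the code generator) of the heap-number
// rule the fldz/FCmp sequence above implements: a number converts to true
// unless it is +0, -0 or NaN.
static inline bool NumberToBoolean(double value) {
  return value == value && value != 0.0;  // NaN and zero convert to false.
}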
4996
4997void CodeGenerator::LoadUnsafeSmi(Register target, Handle<Object> value) {
4998 UNIMPLEMENTED();
4999 // TODO(X64): Implement security policy for loads of smis.
5000}
5001
5002
5003bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
5004 return false;
5005}
5006
5007//------------------------------------------------------------------------------
5008// CodeGenerator implementation of variables, lookups, and stores.
5009
Leon Clarked91b9f72010-01-27 17:25:45 +00005010Reference::Reference(CodeGenerator* cgen,
5011 Expression* expression,
5012 bool persist_after_get)
5013 : cgen_(cgen),
5014 expression_(expression),
5015 type_(ILLEGAL),
5016 persist_after_get_(persist_after_get) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005017 cgen->LoadReference(this);
5018}
5019
5020
5021Reference::~Reference() {
Leon Clarked91b9f72010-01-27 17:25:45 +00005022 ASSERT(is_unloaded() || is_illegal());
Steve Blocka7e24c12009-10-30 11:49:00 +00005023}
5024
5025
5026void CodeGenerator::LoadReference(Reference* ref) {
5027 // References are loaded from both spilled and unspilled code. Set the
5028 // state to unspilled to allow that (and explicitly spill after
5029 // construction at the construction sites).
5030 bool was_in_spilled_code = in_spilled_code_;
5031 in_spilled_code_ = false;
5032
5033 Comment cmnt(masm_, "[ LoadReference");
5034 Expression* e = ref->expression();
5035 Property* property = e->AsProperty();
5036 Variable* var = e->AsVariableProxy()->AsVariable();
5037
5038 if (property != NULL) {
5039 // The expression is either a property or a variable proxy that rewrites
5040 // to a property.
5041 Load(property->obj());
Leon Clarkee46be812010-01-19 14:06:41 +00005042 if (property->key()->IsPropertyName()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005043 ref->set_type(Reference::NAMED);
5044 } else {
5045 Load(property->key());
5046 ref->set_type(Reference::KEYED);
5047 }
5048 } else if (var != NULL) {
5049 // The expression is a variable proxy that does not rewrite to a
5050 // property. Global variables are treated as named property references.
5051 if (var->is_global()) {
5052 LoadGlobal();
5053 ref->set_type(Reference::NAMED);
5054 } else {
5055 ASSERT(var->slot() != NULL);
5056 ref->set_type(Reference::SLOT);
5057 }
5058 } else {
5059 // Anything else is a runtime error.
5060 Load(e);
5061 frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
5062 }
5063
5064 in_spilled_code_ = was_in_spilled_code;
5065}
5066
5067
5068void CodeGenerator::UnloadReference(Reference* ref) {
5069 // Pop a reference from the stack while preserving TOS.
5070 Comment cmnt(masm_, "[ UnloadReference");
5071 frame_->Nip(ref->size());
Leon Clarked91b9f72010-01-27 17:25:45 +00005072 ref->set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00005073}
5074
5075
5076Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
5077 // Currently, this assertion will fail if we try to assign to
5078 // a constant variable that is constant because it is read-only
5079 // (such as the variable referring to a named function expression).
5080 // We need to implement assignments to read-only variables.
5081 // Ideally, we should do this during AST generation (by converting
5082 // such assignments into expression statements); however, in general
5083 // we may not be able to make the decision until past AST generation,
5084 // that is when the entire program is known.
5085 ASSERT(slot != NULL);
5086 int index = slot->index();
5087 switch (slot->type()) {
5088 case Slot::PARAMETER:
5089 return frame_->ParameterAt(index);
5090
5091 case Slot::LOCAL:
5092 return frame_->LocalAt(index);
5093
5094 case Slot::CONTEXT: {
5095 // Follow the context chain if necessary.
5096 ASSERT(!tmp.is(rsi)); // do not overwrite context register
5097 Register context = rsi;
5098 int chain_length = scope()->ContextChainLength(slot->var()->scope());
5099 for (int i = 0; i < chain_length; i++) {
5100 // Load the closure.
5101 // (All contexts, even 'with' contexts, have a closure,
5102 // and it is the same for all contexts inside a function.
5103 // There is no need to go to the function context first.)
5104 __ movq(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
5105 // Load the function context (which is the incoming, outer context).
5106 __ movq(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
5107 context = tmp;
5108 }
5109 // We may have a 'with' context now. Get the function context.
5110 // (In fact this mov may never be needed, since the scope analysis
5111 // may not permit a direct context access in this case and thus we are
5112 // always at a function context. However it is safe to dereference be-
5113 // cause the function context of a function context is itself. Before
5114 // deleting this mov we should try to create a counter-example first,
5115 // though...)
5116 __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
5117 return ContextOperand(tmp, index);
5118 }
5119
5120 default:
5121 UNREACHABLE();
5122 return Operand(rsp, 0);
5123 }
5124}
5125
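// Illustrative sketch (not used by the code generator) of the context-chain
// walk emitted above, expressed with the heap-object accessors used
// elsewhere in the runtime (treat the exact accessors as assumptions):
static inline Context* WalkToFunctionContext(Context* context,
                                             int chain_length) {
  for (int i = 0; i < chain_length; i++) {
    JSFunction* closure =
        JSFunction::cast(context->get(Context::CLOSURE_INDEX));
    context = closure->context();
  }
  // A function context's FCONTEXT slot points to itself, so the final load
  // is safe even when no 'with' context is on the chain.
  return Context::cast(context->get(Context::FCONTEXT_INDEX));
}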
5126
5127Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
5128 Result tmp,
5129 JumpTarget* slow) {
5130 ASSERT(slot->type() == Slot::CONTEXT);
5131 ASSERT(tmp.is_register());
5132 Register context = rsi;
5133
5134 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
5135 if (s->num_heap_slots() > 0) {
5136 if (s->calls_eval()) {
5137 // Check that extension is NULL.
5138 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
5139 Immediate(0));
5140 slow->Branch(not_equal, not_taken);
5141 }
5142 __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
5143 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5144 context = tmp.reg();
5145 }
5146 }
5147 // Check that last extension is NULL.
5148 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
5149 slow->Branch(not_equal, not_taken);
5150 __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
5151 return ContextOperand(tmp.reg(), slot->index());
5152}
5153
5154
5155void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
5156 if (slot->type() == Slot::LOOKUP) {
5157 ASSERT(slot->var()->is_dynamic());
5158
5159 JumpTarget slow;
5160 JumpTarget done;
5161 Result value;
5162
5163 // Generate fast-case code for variables that might be shadowed by
5164 // eval-introduced variables. Eval is used a lot without
5165 // introducing variables. In those cases, we do not want to
5166 // perform a runtime call for all variables in the scope
5167 // containing the eval.
5168 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
5169 value = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow);
5170 // If there was no control flow to slow, we can exit early.
5171 if (!slow.is_linked()) {
5172 frame_->Push(&value);
5173 return;
5174 }
5175
5176 done.Jump(&value);
5177
5178 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
5179 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
5180 // Only generate the fast case for locals that rewrite to slots.
5181 // This rules out argument loads.
5182 if (potential_slot != NULL) {
5183 // Allocate a fresh register to use as a temp in
5184 // ContextSlotOperandCheckExtensions and to hold the result
5185 // value.
5186 value = allocator_->Allocate();
5187 ASSERT(value.is_valid());
5188 __ movq(value.reg(),
5189 ContextSlotOperandCheckExtensions(potential_slot,
5190 value,
5191 &slow));
5192 if (potential_slot->var()->mode() == Variable::CONST) {
5193 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
5194 done.Branch(not_equal, &value);
5195 __ LoadRoot(value.reg(), Heap::kUndefinedValueRootIndex);
5196 }
5197 // There is always control flow to slow from
5198 // ContextSlotOperandCheckExtensions so we have to jump around
5199 // it.
5200 done.Jump(&value);
5201 }
5202 }
5203
5204 slow.Bind();
5205 // A runtime call is inevitable. We eagerly sync frame elements
5206 // to memory so that we can push the arguments directly into place
5207 // on top of the frame.
5208 frame_->SyncRange(0, frame_->element_count() - 1);
5209 frame_->EmitPush(rsi);
5210 __ movq(kScratchRegister, slot->var()->name(), RelocInfo::EMBEDDED_OBJECT);
5211 frame_->EmitPush(kScratchRegister);
5212 if (typeof_state == INSIDE_TYPEOF) {
5213 value =
5214 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
5215 } else {
5216 value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
5217 }
5218
5219 done.Bind(&value);
5220 frame_->Push(&value);
5221
5222 } else if (slot->var()->mode() == Variable::CONST) {
5223 // Const slots may contain 'the hole' value (the constant hasn't been
5224 // initialized yet) which needs to be converted into the 'undefined'
5225 // value.
5226 //
5227 // We currently spill the virtual frame because constants use the
5228 // potentially unsafe direct-frame access of SlotOperand.
5229 VirtualFrame::SpilledScope spilled_scope;
5230 Comment cmnt(masm_, "[ Load const");
5231 JumpTarget exit;
5232 __ movq(rcx, SlotOperand(slot, rcx));
5233 __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
5234 exit.Branch(not_equal);
5235 __ LoadRoot(rcx, Heap::kUndefinedValueRootIndex);
5236 exit.Bind();
5237 frame_->EmitPush(rcx);
5238
5239 } else if (slot->type() == Slot::PARAMETER) {
5240 frame_->PushParameterAt(slot->index());
5241
5242 } else if (slot->type() == Slot::LOCAL) {
5243 frame_->PushLocalAt(slot->index());
5244
5245 } else {
5246 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
5247 // here.
5248 //
5249 // The use of SlotOperand below is safe for an unspilled frame
5250 // because it will always be a context slot.
5251 ASSERT(slot->type() == Slot::CONTEXT);
5252 Result temp = allocator_->Allocate();
5253 ASSERT(temp.is_valid());
5254 __ movq(temp.reg(), SlotOperand(slot, temp.reg()));
5255 frame_->Push(&temp);
5256 }
5257}
5258
5259
5260void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
5261 TypeofState state) {
5262 LoadFromSlot(slot, state);
5263
5264 // Bail out quickly if we're not using lazy arguments allocation.
5265 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
5266
5267 // ... or if the slot isn't a non-parameter arguments slot.
5268 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
5269
5270 // Pop the loaded value from the stack.
5271 Result value = frame_->Pop();
5272
5273 // If the loaded value is a constant, we know if the arguments
5274 // object has been lazily loaded yet.
5275 if (value.is_constant()) {
5276 if (value.handle()->IsTheHole()) {
5277 Result arguments = StoreArgumentsObject(false);
5278 frame_->Push(&arguments);
5279 } else {
5280 frame_->Push(&value);
5281 }
5282 return;
5283 }
5284
5285 // The loaded value is in a register. If it is the sentinel that
5286 // indicates that we haven't loaded the arguments object yet, we
5287 // need to do it now.
5288 JumpTarget exit;
5289 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
5290 frame_->Push(&value);
5291 exit.Branch(not_equal);
5292 Result arguments = StoreArgumentsObject(false);
5293 frame_->SetElementAt(0, &arguments);
5294 exit.Bind();
5295}
5296
5297
5298void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
5299 if (slot->type() == Slot::LOOKUP) {
5300 ASSERT(slot->var()->is_dynamic());
5301
5302 // For now, just do a runtime call. Since the call is inevitable,
5303 // we eagerly sync the virtual frame so we can directly push the
5304 // arguments into place.
5305 frame_->SyncRange(0, frame_->element_count() - 1);
5306
5307 frame_->EmitPush(rsi);
5308 frame_->EmitPush(slot->var()->name());
5309
5310 Result value;
5311 if (init_state == CONST_INIT) {
5312 // Same as the case for a normal store, but ignores attribute
5313 // (e.g. READ_ONLY) of context slot so that we can initialize const
5314 // properties (introduced via eval("const foo = (some expr);")). Also,
5315 // uses the current function context instead of the top context.
5316 //
5317 // Note that we must declare the foo upon entry of eval(), via a
5318 // context slot declaration, but we cannot initialize it at the same
5319 // time, because the const declaration may be at the end of the eval
5320 // code (sigh...) and the const variable may have been used before
5321 // (where its value is 'undefined'). Thus, we can only do the
5322 // initialization when we actually encounter the expression and when
5323 // the expression operands are defined and valid, and thus we need the
5324 // split into 2 operations: declaration of the context slot followed
5325 // by initialization.
5326 value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
5327 } else {
5328 value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
5329 }
5330 // Storing a variable must keep the (new) value on the expression
5331 // stack. This is necessary for compiling chained assignment
5332 // expressions.
5333 frame_->Push(&value);
5334 } else {
5335 ASSERT(!slot->var()->is_dynamic());
5336
5337 JumpTarget exit;
5338 if (init_state == CONST_INIT) {
5339 ASSERT(slot->var()->mode() == Variable::CONST);
5340 // Only the first const initialization must be executed (the slot
5341 // still contains 'the hole' value). When the assignment is executed,
5342 // the code is identical to a normal store (see below).
5343 //
5344 // We spill the frame in the code below because the direct-frame
5345 // access of SlotOperand is potentially unsafe with an unspilled
5346 // frame.
5347 VirtualFrame::SpilledScope spilled_scope;
5348 Comment cmnt(masm_, "[ Init const");
5349 __ movq(rcx, SlotOperand(slot, rcx));
5350 __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
5351 exit.Branch(not_equal);
5352 }
5353
5354 // We must execute the store. Storing a variable must keep the (new)
5355 // value on the stack. This is necessary for compiling assignment
5356 // expressions.
5357 //
5358 // Note: We will reach here even with slot->var()->mode() ==
5359 // Variable::CONST because of const declarations which will initialize
5360 // consts to 'the hole' value and by doing so, end up calling this code.
5361 if (slot->type() == Slot::PARAMETER) {
5362 frame_->StoreToParameterAt(slot->index());
5363 } else if (slot->type() == Slot::LOCAL) {
5364 frame_->StoreToLocalAt(slot->index());
5365 } else {
5366 // The other slot types (LOOKUP and GLOBAL) cannot reach here.
5367 //
5368 // The use of SlotOperand below is safe for an unspilled frame
5369 // because the slot is a context slot.
5370 ASSERT(slot->type() == Slot::CONTEXT);
5371 frame_->Dup();
5372 Result value = frame_->Pop();
5373 value.ToRegister();
5374 Result start = allocator_->Allocate();
5375 ASSERT(start.is_valid());
5376 __ movq(SlotOperand(slot, start.reg()), value.reg());
5377 // RecordWrite may destroy the value registers.
5378 //
5379 // TODO(204): Avoid actually spilling when the value is not
5380 // needed (probably the common case).
5381 frame_->Spill(value.reg());
5382 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
5383 Result temp = allocator_->Allocate();
5384 ASSERT(temp.is_valid());
5385 __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
5386 // The results start, value, and temp are unused by going out of
5387 // scope.
5388 }
5389
5390 exit.Bind();
5391 }
5392}
5393
5394
5395Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
5396 Slot* slot,
5397 TypeofState typeof_state,
5398 JumpTarget* slow) {
5399 // Check that no extension objects have been created by calls to
5400 // eval from the current scope to the global scope.
5401 Register context = rsi;
5402 Result tmp = allocator_->Allocate();
5403 ASSERT(tmp.is_valid()); // All non-reserved registers were available.
5404
5405 Scope* s = scope();
5406 while (s != NULL) {
5407 if (s->num_heap_slots() > 0) {
5408 if (s->calls_eval()) {
5409 // Check that extension is NULL.
5410 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
5411 Immediate(0));
5412 slow->Branch(not_equal, not_taken);
5413 }
5414 // Load next context in chain.
5415 __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
5416 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5417 context = tmp.reg();
5418 }
5419 // If no outer scope calls eval, we do not need to check more
5420 // context extensions. If we have reached an eval scope, we check
5421 // all extensions from this point.
5422 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
5423 s = s->outer_scope();
5424 }
5425
5426 if (s->is_eval_scope()) {
5427 // Loop up the context chain. There is no frame effect so it is
5428 // safe to use raw labels here.
5429 Label next, fast;
5430 if (!context.is(tmp.reg())) {
5431 __ movq(tmp.reg(), context);
5432 }
5433 // Load map for comparison into register, outside loop.
5434 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
5435 __ bind(&next);
5436 // Terminate at global context.
5437 __ cmpq(kScratchRegister, FieldOperand(tmp.reg(), HeapObject::kMapOffset));
5438 __ j(equal, &fast);
5439 // Check that extension is NULL.
5440 __ cmpq(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
5441 slow->Branch(not_equal);
5442 // Load next context in chain.
5443 __ movq(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
5444 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5445 __ jmp(&next);
5446 __ bind(&fast);
5447 }
5448 tmp.Unuse();
5449
5450 // All extension objects were empty and it is safe to use a global
5451 // load IC call.
5452 LoadGlobal();
5453 frame_->Push(slot->var()->name());
5454 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
5455 ? RelocInfo::CODE_TARGET
5456 : RelocInfo::CODE_TARGET_CONTEXT;
5457 Result answer = frame_->CallLoadIC(mode);
5458 // A test rax instruction following the call signals that the inobject
5459 // property case was inlined. Ensure that there is not a test rax
5460 // instruction here.
5461 masm_->nop();
5462 // Discard the global object. The result is in answer.
5463 frame_->Drop();
5464 return answer;
5465}
5466
5467
5468void CodeGenerator::LoadGlobal() {
5469 if (in_spilled_code()) {
5470 frame_->EmitPush(GlobalObject());
5471 } else {
5472 Result temp = allocator_->Allocate();
5473 __ movq(temp.reg(), GlobalObject());
5474 frame_->Push(&temp);
5475 }
5476}
5477
5478
5479void CodeGenerator::LoadGlobalReceiver() {
5480 Result temp = allocator_->Allocate();
5481 Register reg = temp.reg();
5482 __ movq(reg, GlobalObject());
5483 __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
5484 frame_->Push(&temp);
5485}
5486
5487
Andrei Popescu31002712010-02-23 13:46:05 +00005488ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
5489 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
5490 ASSERT(scope()->arguments_shadow() != NULL);
Steve Blocka7e24c12009-10-30 11:49:00 +00005491 // We don't want to do lazy arguments allocation for functions that
5492 // have heap-allocated contexts, because it interferes with the
5493 // uninitialized const tracking in the context objects.
Andrei Popescu31002712010-02-23 13:46:05 +00005494 return (scope()->num_heap_slots() > 0)
Steve Blocka7e24c12009-10-30 11:49:00 +00005495 ? EAGER_ARGUMENTS_ALLOCATION
5496 : LAZY_ARGUMENTS_ALLOCATION;
5497}
5498
5499
5500Result CodeGenerator::StoreArgumentsObject(bool initial) {
5501 ArgumentsAllocationMode mode = ArgumentsMode();
5502 ASSERT(mode != NO_ARGUMENTS_ALLOCATION);
5503
5504 Comment cmnt(masm_, "[ store arguments object");
5505 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
5506 // When using lazy arguments allocation, we store the hole value
5507 // as a sentinel indicating that the arguments object hasn't been
5508 // allocated yet.
5509 frame_->Push(Factory::the_hole_value());
5510 } else {
5511 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
5512 frame_->PushFunction();
5513 frame_->PushReceiverSlotAddress();
Andrei Popescu31002712010-02-23 13:46:05 +00005514 frame_->Push(Smi::FromInt(scope()->num_parameters()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005515 Result result = frame_->CallStub(&stub, 3);
5516 frame_->Push(&result);
5517 }
5518
Leon Clarkee46be812010-01-19 14:06:41 +00005519
Andrei Popescu31002712010-02-23 13:46:05 +00005520 Variable* arguments = scope()->arguments()->var();
5521 Variable* shadow = scope()->arguments_shadow()->var();
Leon Clarkee46be812010-01-19 14:06:41 +00005522 ASSERT(arguments != NULL && arguments->slot() != NULL);
5523 ASSERT(shadow != NULL && shadow->slot() != NULL);
5524 JumpTarget done;
5525 bool skip_arguments = false;
5526 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
5527 // We have to skip storing into the arguments slot if it has
5528 // already been written to. This can happen if the function
5529 // has a local variable named 'arguments'.
Andrei Popescu31002712010-02-23 13:46:05 +00005530 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
Leon Clarkee46be812010-01-19 14:06:41 +00005531 Result probe = frame_->Pop();
5532 if (probe.is_constant()) {
5533 // We have to skip updating the arguments object if it has been
5534 // assigned a proper value.
5535 skip_arguments = !probe.handle()->IsTheHole();
5536 } else {
5537 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
5538 probe.Unuse();
5539 done.Branch(not_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00005540 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005541 }
Leon Clarkee46be812010-01-19 14:06:41 +00005542 if (!skip_arguments) {
5543 StoreToSlot(arguments->slot(), NOT_CONST_INIT);
5544 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
5545 }
5546 StoreToSlot(shadow->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00005547 return frame_->Pop();
5548}
5549
5550
Steve Blockd0582a62009-12-15 09:54:21 +00005551void CodeGenerator::LoadTypeofExpression(Expression* expr) {
5552 // Special handling of identifiers as subexpressions of typeof.
5553 Variable* variable = expr->AsVariableProxy()->AsVariable();
Steve Blocka7e24c12009-10-30 11:49:00 +00005554 if (variable != NULL && !variable->is_this() && variable->is_global()) {
Steve Blockd0582a62009-12-15 09:54:21 +00005555 // For a global variable we build the property reference
5556 // <global>.<variable> and perform a (regular non-contextual) property
5557 // load to make sure we do not get reference errors.
Steve Blocka7e24c12009-10-30 11:49:00 +00005558 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
5559 Literal key(variable->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00005560 Property property(&global, &key, RelocInfo::kNoPosition);
Steve Blockd0582a62009-12-15 09:54:21 +00005561 Reference ref(this, &property);
5562 ref.GetValue();
5563 } else if (variable != NULL && variable->slot() != NULL) {
5564 // For a variable that rewrites to a slot, we signal it is the immediate
5565 // subexpression of a typeof.
5566 LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00005567 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00005568 // Anything else can be handled normally.
5569 Load(expr);
Steve Blocka7e24c12009-10-30 11:49:00 +00005570 }
5571}
5572
5573
Steve Block6ded16b2010-05-10 14:33:55 +01005574static bool CouldBeNaN(const Result& result) {
5575 if (result.type_info().IsSmi()) return false;
5576 if (result.type_info().IsInteger32()) return false;
5577 if (!result.is_constant()) return true;
5578 if (!result.handle()->IsHeapNumber()) return false;
5579 return isnan(HeapNumber::cast(*result.handle())->value());
5580}
5581
5582
5583// Convert from signed to unsigned comparison to match the way EFLAGS are set
5584// by FPU and XMM compare instructions.
5585static Condition DoubleCondition(Condition cc) {
5586 switch (cc) {
5587 case less: return below;
5588 case equal: return equal;
5589 case less_equal: return below_equal;
5590 case greater: return above;
5591 case greater_equal: return above_equal;
5592 default: UNREACHABLE();
5593 }
5594 UNREACHABLE();
5595 return equal;
5596}
5597
5598
Andrei Popescu402d9372010-02-26 13:31:12 +00005599void CodeGenerator::Comparison(AstNode* node,
5600 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00005601 bool strict,
5602 ControlDestination* dest) {
5603 // Strict only makes sense for equality comparisons.
5604 ASSERT(!strict || cc == equal);
5605
5606 Result left_side;
5607 Result right_side;
5608 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
5609 if (cc == greater || cc == less_equal) {
5610 cc = ReverseCondition(cc);
5611 left_side = frame_->Pop();
5612 right_side = frame_->Pop();
5613 } else {
5614 right_side = frame_->Pop();
5615 left_side = frame_->Pop();
5616 }
5617 ASSERT(cc == less || cc == equal || cc == greater_equal);
5618
5619 // If either side is a constant smi, optimize the comparison.
Steve Block6ded16b2010-05-10 14:33:55 +01005620 bool left_side_constant_smi = false;
5621 bool left_side_constant_null = false;
5622 bool left_side_constant_1_char_string = false;
5623 if (left_side.is_constant()) {
5624 left_side_constant_smi = left_side.handle()->IsSmi();
5625 left_side_constant_null = left_side.handle()->IsNull();
5626 left_side_constant_1_char_string =
5627 (left_side.handle()->IsString() &&
5628 String::cast(*left_side.handle())->length() == 1 &&
5629 String::cast(*left_side.handle())->IsAsciiRepresentation());
5630 }
5631 bool right_side_constant_smi = false;
5632 bool right_side_constant_null = false;
5633 bool right_side_constant_1_char_string = false;
5634 if (right_side.is_constant()) {
5635 right_side_constant_smi = right_side.handle()->IsSmi();
5636 right_side_constant_null = right_side.handle()->IsNull();
5637 right_side_constant_1_char_string =
5638 (right_side.handle()->IsString() &&
5639 String::cast(*right_side.handle())->length() == 1 &&
5640 String::cast(*right_side.handle())->IsAsciiRepresentation());
5641 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005642
5643 if (left_side_constant_smi || right_side_constant_smi) {
5644 if (left_side_constant_smi && right_side_constant_smi) {
5645 // Trivial case, comparing two constants.
5646 int left_value = Smi::cast(*left_side.handle())->value();
5647 int right_value = Smi::cast(*right_side.handle())->value();
5648 switch (cc) {
5649 case less:
5650 dest->Goto(left_value < right_value);
5651 break;
5652 case equal:
5653 dest->Goto(left_value == right_value);
5654 break;
5655 case greater_equal:
5656 dest->Goto(left_value >= right_value);
5657 break;
5658 default:
5659 UNREACHABLE();
5660 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005661 } else {
5662 // Only one side is a constant Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00005663 // If left side is a constant Smi, reverse the operands.
5664 // Since one side is a constant Smi, conversion order does not matter.
5665 if (left_side_constant_smi) {
5666 Result temp = left_side;
5667 left_side = right_side;
5668 right_side = temp;
5669 cc = ReverseCondition(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01005670 // This may re-introduce greater or less_equal as the value of cc.
Steve Blocka7e24c12009-10-30 11:49:00 +00005671 // CompareStub and the inline code both support all values of cc.
5672 }
5673 // Implement comparison against a constant Smi, inlining the case
5674 // where both sides are Smis.
5675 left_side.ToRegister();
Andrei Popescu402d9372010-02-26 13:31:12 +00005676 Register left_reg = left_side.reg();
5677 Handle<Object> right_val = right_side.handle();
Steve Blocka7e24c12009-10-30 11:49:00 +00005678
5679 // Here we split control flow to the stub call and inlined cases
5680 // before finally splitting it to the control destination. We use
5681 // a jump target and branching to duplicate the virtual frame at
5682 // the first split. We manually handle the off-frame references
5683 // by reconstituting them on the non-fall-through path.
5684 JumpTarget is_smi;
Steve Blocka7e24c12009-10-30 11:49:00 +00005685
5686 Condition left_is_smi = masm_->CheckSmi(left_side.reg());
5687 is_smi.Branch(left_is_smi);
5688
Steve Block6ded16b2010-05-10 14:33:55 +01005689 bool is_loop_condition = (node->AsExpression() != NULL) &&
5690 node->AsExpression()->is_loop_condition();
5691 if (!is_loop_condition && right_val->IsSmi()) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005692 // Right side is a constant smi and left side has been checked
5693 // not to be a smi.
5694 JumpTarget not_number;
5695 __ Cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
5696 Factory::heap_number_map());
5697 not_number.Branch(not_equal, &left_side);
5698 __ movsd(xmm1,
5699 FieldOperand(left_reg, HeapNumber::kValueOffset));
5700 int value = Smi::cast(*right_val)->value();
5701 if (value == 0) {
5702 __ xorpd(xmm0, xmm0);
5703 } else {
5704 Result temp = allocator()->Allocate();
5705 __ movl(temp.reg(), Immediate(value));
5706 __ cvtlsi2sd(xmm0, temp.reg());
5707 temp.Unuse();
5708 }
5709 __ ucomisd(xmm1, xmm0);
5710 // Jump to builtin for NaN.
5711 not_number.Branch(parity_even, &left_side);
5712 left_side.Unuse();
Steve Block6ded16b2010-05-10 14:33:55 +01005713 dest->true_target()->Branch(DoubleCondition(cc));
Andrei Popescu402d9372010-02-26 13:31:12 +00005714 dest->false_target()->Jump();
5715 not_number.Bind(&left_side);
5716 }
5717
Steve Blocka7e24c12009-10-30 11:49:00 +00005718 // Setup and call the compare stub.
Steve Block6ded16b2010-05-10 14:33:55 +01005719 CompareStub stub(cc, strict, kCantBothBeNaN);
Steve Blocka7e24c12009-10-30 11:49:00 +00005720 Result result = frame_->CallStub(&stub, &left_side, &right_side);
5721 result.ToRegister();
5722 __ testq(result.reg(), result.reg());
5723 result.Unuse();
5724 dest->true_target()->Branch(cc);
5725 dest->false_target()->Jump();
5726
5727 is_smi.Bind();
5728 left_side = Result(left_reg);
5729 right_side = Result(right_val);
5730 // Test smi equality and comparison by signed int comparison.
5731 // Both sides are smis, so we can use an Immediate.
Steve Block3ce2e202009-11-05 08:53:23 +00005732 __ SmiCompare(left_side.reg(), Smi::cast(*right_side.handle()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005733 left_side.Unuse();
5734 right_side.Unuse();
5735 dest->Split(cc);
5736 }
5737 } else if (cc == equal &&
5738 (left_side_constant_null || right_side_constant_null)) {
5739 // To make null checks efficient, we check if either the left side or
5740 // the right side is the constant 'null'.
5741 // If so, we optimize the code by inlining a null check instead of
5742 // calling the (very) general runtime routine for checking equality.
5743 Result operand = left_side_constant_null ? right_side : left_side;
5744 right_side.Unuse();
5745 left_side.Unuse();
5746 operand.ToRegister();
5747 __ CompareRoot(operand.reg(), Heap::kNullValueRootIndex);
5748 if (strict) {
5749 operand.Unuse();
5750 dest->Split(equal);
5751 } else {
5752 // The 'null' value is only equal to 'undefined' if using non-strict
5753 // comparisons.
5754 dest->true_target()->Branch(equal);
5755 __ CompareRoot(operand.reg(), Heap::kUndefinedValueRootIndex);
5756 dest->true_target()->Branch(equal);
5757 Condition is_smi = masm_->CheckSmi(operand.reg());
5758 dest->false_target()->Branch(is_smi);
5759
5760 // It can be an undetectable object.
5761 // Use a scratch register in preference to spilling operand.reg().
5762 Result temp = allocator()->Allocate();
5763 ASSERT(temp.is_valid());
5764 __ movq(temp.reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00005765 FieldOperand(operand.reg(), HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005766 __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset),
5767 Immediate(1 << Map::kIsUndetectable));
5768 temp.Unuse();
5769 operand.Unuse();
5770 dest->Split(not_zero);
5771 }
Steve Block6ded16b2010-05-10 14:33:55 +01005772 } else if (left_side_constant_1_char_string ||
5773 right_side_constant_1_char_string) {
5774 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
5775 // Trivial case, comparing two constants.
5776 int left_value = String::cast(*left_side.handle())->Get(0);
5777 int right_value = String::cast(*right_side.handle())->Get(0);
5778 switch (cc) {
5779 case less:
5780 dest->Goto(left_value < right_value);
5781 break;
5782 case equal:
5783 dest->Goto(left_value == right_value);
5784 break;
5785 case greater_equal:
5786 dest->Goto(left_value >= right_value);
5787 break;
5788 default:
5789 UNREACHABLE();
5790 }
5791 } else {
5792 // Only one side is a constant 1 character string.
5793 // If left side is a constant 1-character string, reverse the operands.
5794 // Since one side is a constant string, conversion order does not matter.
5795 if (left_side_constant_1_char_string) {
5796 Result temp = left_side;
5797 left_side = right_side;
5798 right_side = temp;
5799 cc = ReverseCondition(cc);
5800 // This may reintroduce greater or less_equal as the value of cc.
5801 // CompareStub and the inline code both support all values of cc.
5802 }
5803 // Implement comparison against a constant string, inlining the case
5804 // where both sides are strings.
5805 left_side.ToRegister();
5806
5807 // Here we split control flow to the stub call and inlined cases
5808 // before finally splitting it to the control destination. We use
5809 // a jump target and branching to duplicate the virtual frame at
5810 // the first split. We manually handle the off-frame references
5811 // by reconstituting them on the non-fall-through path.
5812 JumpTarget is_not_string, is_string;
5813 Register left_reg = left_side.reg();
5814 Handle<Object> right_val = right_side.handle();
5815 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
5816 Condition is_smi = masm()->CheckSmi(left_reg);
5817 is_not_string.Branch(is_smi, &left_side);
5818 Result temp = allocator_->Allocate();
5819 ASSERT(temp.is_valid());
5820 __ movq(temp.reg(),
5821 FieldOperand(left_reg, HeapObject::kMapOffset));
5822 __ movzxbl(temp.reg(),
5823 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
5824 // If we are testing for equality then make use of the symbol shortcut.
5825 // Check if the left hand side has the same type as the right hand
5826 // side (which is always a symbol).
5827 if (cc == equal) {
5828 Label not_a_symbol;
5829 ASSERT(kSymbolTag != 0);
5830 // Ensure that no non-strings have the symbol bit set.
5831 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
5832 __ testb(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
5833 __ j(zero, &not_a_symbol);
5834 // They are symbols, so do identity compare.
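          // Symbols are interned, so two symbols are equal exactly when they
          // are the same object; a pointer compare is sufficient.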
5835 __ Cmp(left_reg, right_side.handle());
5836 dest->true_target()->Branch(equal);
5837 dest->false_target()->Branch(not_equal);
5838 __ bind(&not_a_symbol);
5839 }
5840 // Call the compare stub if the left side is not a flat ascii string.
5841 __ andb(temp.reg(),
5842 Immediate(kIsNotStringMask |
5843 kStringRepresentationMask |
5844 kStringEncodingMask));
5845 __ cmpb(temp.reg(),
5846 Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
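        // Masking the instance type with the string, representation and
        // encoding bits and comparing against the sequential ASCII pattern
        // classifies the string with a single compare.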
5847 temp.Unuse();
5848 is_string.Branch(equal, &left_side);
5849
 5850 // Set up and call the compare stub.
5851 is_not_string.Bind(&left_side);
5852 CompareStub stub(cc, strict, kCantBothBeNaN);
5853 Result result = frame_->CallStub(&stub, &left_side, &right_side);
5854 result.ToRegister();
5855 __ testq(result.reg(), result.reg());
5856 result.Unuse();
5857 dest->true_target()->Branch(cc);
5858 dest->false_target()->Jump();
5859
5860 is_string.Bind(&left_side);
5861 // left_side is a sequential ASCII string.
5862 ASSERT(left_side.reg().is(left_reg));
5863 right_side = Result(right_val);
5864 Result temp2 = allocator_->Allocate();
5865 ASSERT(temp2.is_valid());
5866 // Test string equality and comparison.
5867 if (cc == equal) {
5868 Label comparison_done;
5869 __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
5870 Smi::FromInt(1));
5871 __ j(not_equal, &comparison_done);
5872 uint8_t char_value =
5873 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
5874 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
5875 Immediate(char_value));
5876 __ bind(&comparison_done);
5877 } else {
5878 __ movq(temp2.reg(),
5879 FieldOperand(left_side.reg(), String::kLengthOffset));
5880 __ SmiSubConstant(temp2.reg(), temp2.reg(), Smi::FromInt(1));
5881 Label comparison;
5882 // If the length is 0 then the subtraction gave -1 which compares less
5883 // than any character.
5884 __ j(negative, &comparison);
5885 // Otherwise load the first character.
5886 __ movzxbl(temp2.reg(),
5887 FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize));
5888 __ bind(&comparison);
5889 // Compare the first character of the string with the
5890 // constant 1-character string.
5891 uint8_t char_value =
5892 static_cast<uint8_t>(String::cast(*right_side.handle())->Get(0));
5893 __ cmpb(temp2.reg(), Immediate(char_value));
5894 Label characters_were_different;
5895 __ j(not_equal, &characters_were_different);
5896 // If the first character is the same then the long string sorts after
5897 // the short one.
5898 __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
5899 Smi::FromInt(1));
5900 __ bind(&characters_were_different);
5901 }
5902 temp2.Unuse();
5903 left_side.Unuse();
5904 right_side.Unuse();
5905 dest->Split(cc);
5906 }
5907 } else {
5908 // Neither side is a constant Smi, constant 1-char string, or constant null.
Steve Blocka7e24c12009-10-30 11:49:00 +00005909 // If either side is a non-smi constant, skip the smi check.
5910 bool known_non_smi =
5911 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
Steve Block6ded16b2010-05-10 14:33:55 +01005912 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
5913 left_side.type_info().IsDouble() ||
5914 right_side.type_info().IsDouble();
5915
5916 NaNInformation nan_info =
5917 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
5918 kBothCouldBeNaN :
5919 kCantBothBeNaN;
5920
 5921 // Inline the number comparison, handling any combination of smis and heap
 5922 // numbers, if:
 5923 // the code is in a loop,
 5924 // the compare operation is different from equal, and
 5925 // the compare is not a for-loop condition.
 5926 // The reason for excluding equal is that it will most likely be done
 5927 // on smis (not heap numbers), and the code for comparing smis is inlined
 5928 // separately. The same reasoning applies to for-loop conditions, which
 5929 // will also most likely be smi comparisons.
5930 bool is_loop_condition = (node->AsExpression() != NULL)
5931 && node->AsExpression()->is_loop_condition();
5932 bool inline_number_compare =
5933 loop_nesting() > 0 && cc != equal && !is_loop_condition;
5934
Steve Blocka7e24c12009-10-30 11:49:00 +00005935 left_side.ToRegister();
5936 right_side.ToRegister();
5937
5938 if (known_non_smi) {
Steve Block6ded16b2010-05-10 14:33:55 +01005939 // Inlined equality check:
5940 // If at least one of the objects is not NaN, then if the objects
5941 // are identical, they are equal.
5942 if (nan_info == kCantBothBeNaN && cc == equal) {
5943 __ cmpq(left_side.reg(), right_side.reg());
5944 dest->true_target()->Branch(equal);
5945 }
5946
5947 // Inlined number comparison:
5948 if (inline_number_compare) {
5949 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
5950 }
5951
5952 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
Steve Blocka7e24c12009-10-30 11:49:00 +00005953 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
Steve Block6ded16b2010-05-10 14:33:55 +01005954 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flag.
Steve Blocka7e24c12009-10-30 11:49:00 +00005955 answer.Unuse();
5956 dest->Split(cc);
5957 } else {
5958 // Here we split control flow to the stub call and inlined cases
5959 // before finally splitting it to the control destination. We use
5960 // a jump target and branching to duplicate the virtual frame at
5961 // the first split. We manually handle the off-frame references
5962 // by reconstituting them on the non-fall-through path.
5963 JumpTarget is_smi;
5964 Register left_reg = left_side.reg();
5965 Register right_reg = right_side.reg();
5966
5967 Condition both_smi = masm_->CheckBothSmi(left_reg, right_reg);
5968 is_smi.Branch(both_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01005969
5970 // Inline the equality check if both operands can't be a NaN. If both
5971 // objects are the same they are equal.
5972 if (nan_info == kCantBothBeNaN && cc == equal) {
5973 __ cmpq(left_side.reg(), right_side.reg());
5974 dest->true_target()->Branch(equal);
5975 }
5976
5977 // Inlined number comparison:
5978 if (inline_number_compare) {
5979 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
5980 }
5981
5982 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
Steve Blocka7e24c12009-10-30 11:49:00 +00005983 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
Steve Block6ded16b2010-05-10 14:33:55 +01005984 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flags.
Steve Blocka7e24c12009-10-30 11:49:00 +00005985 answer.Unuse();
5986 dest->true_target()->Branch(cc);
5987 dest->false_target()->Jump();
5988
5989 is_smi.Bind();
5990 left_side = Result(left_reg);
5991 right_side = Result(right_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00005992 __ SmiCompare(left_side.reg(), right_side.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00005993 right_side.Unuse();
5994 left_side.Unuse();
5995 dest->Split(cc);
5996 }
5997 }
5998}
5999
6000
Steve Block6ded16b2010-05-10 14:33:55 +01006001// Load a comparison operand into an XMM register. Jump to the not_numbers jump
 6002 // target, passing the left and right results, if the operand is not a number.
6003static void LoadComparisonOperand(MacroAssembler* masm_,
6004 Result* operand,
6005 XMMRegister xmm_reg,
6006 Result* left_side,
6007 Result* right_side,
6008 JumpTarget* not_numbers) {
6009 Label done;
6010 if (operand->type_info().IsDouble()) {
6011 // Operand is known to be a heap number, just load it.
6012 __ movsd(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
6013 } else if (operand->type_info().IsSmi()) {
6014 // Operand is known to be a smi. Convert it to double and keep the original
6015 // smi.
6016 __ SmiToInteger32(kScratchRegister, operand->reg());
6017 __ cvtlsi2sd(xmm_reg, kScratchRegister);
6018 } else {
6019 // Operand type not known, check for smi or heap number.
6020 Label smi;
6021 __ JumpIfSmi(operand->reg(), &smi);
6022 if (!operand->type_info().IsNumber()) {
6023 __ LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
6024 __ cmpq(FieldOperand(operand->reg(), HeapObject::kMapOffset),
6025 kScratchRegister);
6026 not_numbers->Branch(not_equal, left_side, right_side, taken);
6027 }
6028 __ movsd(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
6029 __ jmp(&done);
6030
6031 __ bind(&smi);
 6032 // Convert the smi to a double and keep the original smi.
6033 __ SmiToInteger32(kScratchRegister, operand->reg());
6034 __ cvtlsi2sd(xmm_reg, kScratchRegister);
6035 __ jmp(&done);
6036 }
6037 __ bind(&done);
6038}
6039
6040
6041void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
6042 Result* right_side,
6043 Condition cc,
6044 ControlDestination* dest) {
6045 ASSERT(left_side->is_register());
6046 ASSERT(right_side->is_register());
6047
6048 JumpTarget not_numbers;
6049 // Load left and right operand into registers xmm0 and xmm1 and compare.
6050 LoadComparisonOperand(masm_, left_side, xmm0, left_side, right_side,
6051 &not_numbers);
6052 LoadComparisonOperand(masm_, right_side, xmm1, left_side, right_side,
6053 &not_numbers);
6054 __ comisd(xmm0, xmm1);
6055 // Bail out if a NaN is involved.
6056 not_numbers.Branch(parity_even, left_side, right_side);
6057
6058 // Split to destination targets based on comparison.
6059 left_side->Unuse();
6060 right_side->Unuse();
6061 dest->true_target()->Branch(DoubleCondition(cc));
6062 dest->false_target()->Jump();
6063
6064 not_numbers.Bind(left_side, right_side);
6065}
6066
6067
Steve Blocka7e24c12009-10-30 11:49:00 +00006068class DeferredInlineBinaryOperation: public DeferredCode {
6069 public:
6070 DeferredInlineBinaryOperation(Token::Value op,
6071 Register dst,
6072 Register left,
6073 Register right,
6074 OverwriteMode mode)
6075 : op_(op), dst_(dst), left_(left), right_(right), mode_(mode) {
6076 set_comment("[ DeferredInlineBinaryOperation");
6077 }
6078
6079 virtual void Generate();
6080
6081 private:
6082 Token::Value op_;
6083 Register dst_;
6084 Register left_;
6085 Register right_;
6086 OverwriteMode mode_;
6087};
6088
6089
6090void DeferredInlineBinaryOperation::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01006091 Label done;
6092 if ((op_ == Token::ADD)
 6093 || (op_ == Token::SUB)
6094 || (op_ == Token::MUL)
6095 || (op_ == Token::DIV)) {
6096 Label call_runtime;
6097 Label left_smi, right_smi, load_right, do_op;
6098 __ JumpIfSmi(left_, &left_smi);
6099 __ CompareRoot(FieldOperand(left_, HeapObject::kMapOffset),
6100 Heap::kHeapNumberMapRootIndex);
6101 __ j(not_equal, &call_runtime);
6102 __ movsd(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
6103 if (mode_ == OVERWRITE_LEFT) {
6104 __ movq(dst_, left_);
6105 }
6106 __ jmp(&load_right);
6107
6108 __ bind(&left_smi);
6109 __ SmiToInteger32(left_, left_);
6110 __ cvtlsi2sd(xmm0, left_);
6111 __ Integer32ToSmi(left_, left_);
6112 if (mode_ == OVERWRITE_LEFT) {
6113 Label alloc_failure;
6114 __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
6115 }
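    // When the left operand was a smi there is no heap number to reuse, so
    // even in OVERWRITE_LEFT mode a fresh heap number is allocated for the
    // result.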
6116
6117 __ bind(&load_right);
6118 __ JumpIfSmi(right_, &right_smi);
6119 __ CompareRoot(FieldOperand(right_, HeapObject::kMapOffset),
6120 Heap::kHeapNumberMapRootIndex);
6121 __ j(not_equal, &call_runtime);
6122 __ movsd(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
6123 if (mode_ == OVERWRITE_RIGHT) {
6124 __ movq(dst_, right_);
6125 } else if (mode_ == NO_OVERWRITE) {
6126 Label alloc_failure;
6127 __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
6128 }
6129 __ jmp(&do_op);
6130
6131 __ bind(&right_smi);
6132 __ SmiToInteger32(right_, right_);
6133 __ cvtlsi2sd(xmm1, right_);
6134 __ Integer32ToSmi(right_, right_);
6135 if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
6136 Label alloc_failure;
6137 __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
6138 }
6139
6140 __ bind(&do_op);
6141 switch (op_) {
6142 case Token::ADD: __ addsd(xmm0, xmm1); break;
6143 case Token::SUB: __ subsd(xmm0, xmm1); break;
6144 case Token::MUL: __ mulsd(xmm0, xmm1); break;
6145 case Token::DIV: __ divsd(xmm0, xmm1); break;
6146 default: UNREACHABLE();
6147 }
6148 __ movsd(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
6149 __ jmp(&done);
6150
6151 __ bind(&call_runtime);
6152 }
Steve Blockd0582a62009-12-15 09:54:21 +00006153 GenericBinaryOpStub stub(op_, mode_, NO_SMI_CODE_IN_STUB);
6154 stub.GenerateCall(masm_, left_, right_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006155 if (!dst_.is(rax)) __ movq(dst_, rax);
Steve Block6ded16b2010-05-10 14:33:55 +01006156 __ bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00006157}
6158
6159
Steve Block6ded16b2010-05-10 14:33:55 +01006160static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
6161 Token::Value op,
6162 const Result& right,
6163 const Result& left) {
6164 // Set TypeInfo of result according to the operation performed.
6165 // We rely on the fact that smis have a 32 bit payload on x64.
6166 STATIC_ASSERT(kSmiValueSize == 32);
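  // Note: the bitwise operators and the signed shifts below produce 32-bit
  // signed integers, which always fit in a 32-bit smi payload; unsigned
  // shift right is the exception and is handled separately.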
6167 switch (op) {
6168 case Token::COMMA:
6169 return right.type_info();
6170 case Token::OR:
6171 case Token::AND:
6172 // Result type can be either of the two input types.
6173 return operands_type;
6174 case Token::BIT_OR:
6175 case Token::BIT_XOR:
6176 case Token::BIT_AND:
6177 // Result is always a smi.
6178 return TypeInfo::Smi();
6179 case Token::SAR:
6180 case Token::SHL:
6181 // Result is always a smi.
6182 return TypeInfo::Smi();
6183 case Token::SHR:
6184 // Result of x >>> y is always a smi if masked y >= 1, otherwise a number.
6185 return (right.is_constant() && right.handle()->IsSmi()
6186 && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
6187 ? TypeInfo::Smi()
6188 : TypeInfo::Number();
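      // A shift by at least one bit yields a value below 2^31, which fits in
      // a 32-bit smi payload; x >>> 0 can be as large as 2^32 - 1 and may
      // therefore require a heap number.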
6189 case Token::ADD:
6190 if (operands_type.IsNumber()) {
6191 return TypeInfo::Number();
6192 } else if (left.type_info().IsString() || right.type_info().IsString()) {
6193 return TypeInfo::String();
6194 } else {
6195 return TypeInfo::Unknown();
6196 }
6197 case Token::SUB:
6198 case Token::MUL:
6199 case Token::DIV:
6200 case Token::MOD:
6201 // Result is always a number.
6202 return TypeInfo::Number();
6203 default:
6204 UNREACHABLE();
6205 }
6206 UNREACHABLE();
6207 return TypeInfo::Unknown();
6208}
6209
6210
6211void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
Steve Blocka7e24c12009-10-30 11:49:00 +00006212 OverwriteMode overwrite_mode) {
6213 Comment cmnt(masm_, "[ BinaryOperation");
Steve Block6ded16b2010-05-10 14:33:55 +01006214 Token::Value op = expr->op();
Steve Blocka7e24c12009-10-30 11:49:00 +00006215 Comment cmnt_token(masm_, Token::String(op));
6216
6217 if (op == Token::COMMA) {
6218 // Simply discard left value.
6219 frame_->Nip(1);
6220 return;
6221 }
6222
Steve Blocka7e24c12009-10-30 11:49:00 +00006223 Result right = frame_->Pop();
6224 Result left = frame_->Pop();
6225
6226 if (op == Token::ADD) {
Steve Block6ded16b2010-05-10 14:33:55 +01006227 const bool left_is_string = left.type_info().IsString();
6228 const bool right_is_string = right.type_info().IsString();
6229 // Make sure constant strings have string type info.
6230 ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
6231 left_is_string);
6232 ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
6233 right_is_string);
Steve Blocka7e24c12009-10-30 11:49:00 +00006234 if (left_is_string || right_is_string) {
6235 frame_->Push(&left);
6236 frame_->Push(&right);
6237 Result answer;
6238 if (left_is_string) {
6239 if (right_is_string) {
Steve Block6ded16b2010-05-10 14:33:55 +01006240 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
6241 answer = frame_->CallStub(&stub, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00006242 } else {
6243 answer =
6244 frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
6245 }
6246 } else if (right_is_string) {
6247 answer =
6248 frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
6249 }
Steve Block6ded16b2010-05-10 14:33:55 +01006250 answer.set_type_info(TypeInfo::String());
Steve Blocka7e24c12009-10-30 11:49:00 +00006251 frame_->Push(&answer);
6252 return;
6253 }
6254 // Neither operand is known to be a string.
6255 }
6256
Andrei Popescu402d9372010-02-26 13:31:12 +00006257 bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
6258 bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
6259 bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
6260 bool right_is_non_smi_constant =
6261 right.is_constant() && !right.handle()->IsSmi();
Steve Blocka7e24c12009-10-30 11:49:00 +00006262
Andrei Popescu402d9372010-02-26 13:31:12 +00006263 if (left_is_smi_constant && right_is_smi_constant) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006264 // Compute the constant result at compile time, and leave it on the frame.
6265 int left_int = Smi::cast(*left.handle())->value();
6266 int right_int = Smi::cast(*right.handle())->value();
6267 if (FoldConstantSmis(op, left_int, right_int)) return;
6268 }
6269
Andrei Popescu402d9372010-02-26 13:31:12 +00006270 // Get number type of left and right sub-expressions.
Steve Block6ded16b2010-05-10 14:33:55 +01006271 TypeInfo operands_type =
6272 TypeInfo::Combine(left.type_info(), right.type_info());
6273
6274 TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);
Andrei Popescu402d9372010-02-26 13:31:12 +00006275
Leon Clarked91b9f72010-01-27 17:25:45 +00006276 Result answer;
Andrei Popescu402d9372010-02-26 13:31:12 +00006277 if (left_is_non_smi_constant || right_is_non_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01006278 // Go straight to the slow case, with no smi code.
Andrei Popescu402d9372010-02-26 13:31:12 +00006279 GenericBinaryOpStub stub(op,
6280 overwrite_mode,
6281 NO_SMI_CODE_IN_STUB,
6282 operands_type);
Leon Clarke4515c472010-02-03 11:58:03 +00006283 answer = stub.GenerateCall(masm_, frame_, &left, &right);
Andrei Popescu402d9372010-02-26 13:31:12 +00006284 } else if (right_is_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01006285 answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
6286 false, overwrite_mode);
Andrei Popescu402d9372010-02-26 13:31:12 +00006287 } else if (left_is_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01006288 answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
6289 true, overwrite_mode);
Leon Clarked91b9f72010-01-27 17:25:45 +00006290 } else {
6291 // Set the flags based on the operation, type and loop nesting level.
6292 // Bit operations always assume they likely operate on Smis. Still only
6293 // generate the inline Smi check code if this operation is part of a loop.
6294 // For all other operations only inline the Smi check code for likely smis
6295 // if the operation is part of a loop.
Steve Block6ded16b2010-05-10 14:33:55 +01006296 if (loop_nesting() > 0 &&
6297 (Token::IsBitOp(op) ||
6298 operands_type.IsInteger32() ||
6299 expr->type()->IsLikelySmi())) {
6300 answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
Leon Clarked91b9f72010-01-27 17:25:45 +00006301 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00006302 GenericBinaryOpStub stub(op,
6303 overwrite_mode,
6304 NO_GENERIC_BINARY_FLAGS,
6305 operands_type);
Leon Clarke4515c472010-02-03 11:58:03 +00006306 answer = stub.GenerateCall(masm_, frame_, &left, &right);
Steve Blocka7e24c12009-10-30 11:49:00 +00006307 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006308 }
Andrei Popescu402d9372010-02-26 13:31:12 +00006309
Steve Block6ded16b2010-05-10 14:33:55 +01006310 answer.set_type_info(result_type);
Leon Clarked91b9f72010-01-27 17:25:45 +00006311 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00006312}
6313
6314
6315// Emit a LoadIC call to get the value from receiver and leave it in
6316// dst. The receiver register is restored after the call.
6317class DeferredReferenceGetNamedValue: public DeferredCode {
6318 public:
6319 DeferredReferenceGetNamedValue(Register dst,
6320 Register receiver,
6321 Handle<String> name)
6322 : dst_(dst), receiver_(receiver), name_(name) {
6323 set_comment("[ DeferredReferenceGetNamedValue");
6324 }
6325
6326 virtual void Generate();
6327
6328 Label* patch_site() { return &patch_site_; }
6329
6330 private:
6331 Label patch_site_;
6332 Register dst_;
6333 Register receiver_;
6334 Handle<String> name_;
6335};
6336
6337
6338void DeferredReferenceGetNamedValue::Generate() {
6339 __ push(receiver_);
6340 __ Move(rcx, name_);
6341 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
6342 __ Call(ic, RelocInfo::CODE_TARGET);
6343 // The call must be followed by a test rax instruction to indicate
6344 // that the inobject property case was inlined.
6345 //
 6346 // Store the delta from the map check instruction to this point in the
 6347 // test instruction's immediate. Use masm_-> instead of the __ macro since the
6348 // latter can't return a value.
6349 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
6350 // Here we use masm_-> instead of the __ macro because this is the
6351 // instruction that gets patched and coverage code gets in the way.
6352 masm_->testl(rax, Immediate(-delta_to_patch_site));
6353 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
6354
6355 if (!dst_.is(rax)) __ movq(dst_, rax);
6356 __ pop(receiver_);
6357}
6358
6359
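// The deferred paths below are only reached after the inline smi code has
// failed, so most of the stubs are created with NO_SMI_CODE_IN_STUB and go
// straight to the heap-number and generic cases; MOD is the exception, as
// noted in DeferredInlineSmiOperation::Generate.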
6360void DeferredInlineSmiAdd::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00006361 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
6362 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006363 if (!dst_.is(rax)) __ movq(dst_, rax);
6364}
6365
6366
6367void DeferredInlineSmiAddReversed::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00006368 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
6369 igostub.GenerateCall(masm_, value_, dst_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006370 if (!dst_.is(rax)) __ movq(dst_, rax);
6371}
6372
6373
6374void DeferredInlineSmiSub::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00006375 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, NO_SMI_CODE_IN_STUB);
6376 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006377 if (!dst_.is(rax)) __ movq(dst_, rax);
6378}
6379
6380
6381void DeferredInlineSmiOperation::Generate() {
Steve Blocka7e24c12009-10-30 11:49:00 +00006382 // For mod we don't generate all the Smi code inline.
6383 GenericBinaryOpStub stub(
6384 op_,
6385 overwrite_mode_,
Steve Blockd0582a62009-12-15 09:54:21 +00006386 (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB);
6387 stub.GenerateCall(masm_, src_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006388 if (!dst_.is(rax)) __ movq(dst_, rax);
6389}
6390
6391
Steve Block6ded16b2010-05-10 14:33:55 +01006392void DeferredInlineSmiOperationReversed::Generate() {
6393 GenericBinaryOpStub stub(
6394 op_,
6395 overwrite_mode_,
6396 NO_SMI_CODE_IN_STUB);
6397 stub.GenerateCall(masm_, value_, src_);
6398 if (!dst_.is(rax)) __ movq(dst_, rax);
6399}
6400
6401
6402Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
Leon Clarked91b9f72010-01-27 17:25:45 +00006403 Result* operand,
6404 Handle<Object> value,
Leon Clarked91b9f72010-01-27 17:25:45 +00006405 bool reversed,
6406 OverwriteMode overwrite_mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006407 // NOTE: This is an attempt to inline (a bit) more of the code for
6408 // some possible smi operations (like + and -) when (at least) one
6409 // of the operands is a constant smi.
6410 // Consumes the argument "operand".
Steve Blocka7e24c12009-10-30 11:49:00 +00006411 if (IsUnsafeSmi(value)) {
6412 Result unsafe_operand(value);
6413 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01006414 return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
Steve Blocka7e24c12009-10-30 11:49:00 +00006415 overwrite_mode);
6416 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01006417 return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
Steve Blocka7e24c12009-10-30 11:49:00 +00006418 overwrite_mode);
6419 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006420 }
6421
6422 // Get the literal value.
6423 Smi* smi_value = Smi::cast(*value);
6424 int int_value = smi_value->value();
6425
Steve Block6ded16b2010-05-10 14:33:55 +01006426 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00006427 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006428 switch (op) {
6429 case Token::ADD: {
6430 operand->ToRegister();
6431 frame_->Spill(operand->reg());
6432 DeferredCode* deferred = NULL;
6433 if (reversed) {
6434 deferred = new DeferredInlineSmiAddReversed(operand->reg(),
6435 smi_value,
6436 overwrite_mode);
6437 } else {
6438 deferred = new DeferredInlineSmiAdd(operand->reg(),
6439 smi_value,
6440 overwrite_mode);
6441 }
6442 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6443 __ SmiAddConstant(operand->reg(),
6444 operand->reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00006445 smi_value,
Steve Blocka7e24c12009-10-30 11:49:00 +00006446 deferred->entry_label());
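      // The label passed to SmiAddConstant is taken when the result is not a
      // smi (i.e. on overflow), so overflow also falls back to the stub.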
6447 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006448 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006449 break;
6450 }
6451
6452 case Token::SUB: {
6453 if (reversed) {
6454 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01006455 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006456 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006457 } else {
6458 operand->ToRegister();
6459 frame_->Spill(operand->reg());
6460 DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(),
6461 smi_value,
6462 overwrite_mode);
6463 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6464 // A smi currently fits in a 32-bit Immediate.
6465 __ SmiSubConstant(operand->reg(),
6466 operand->reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00006467 smi_value,
Steve Blocka7e24c12009-10-30 11:49:00 +00006468 deferred->entry_label());
6469 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006470 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006471 }
6472 break;
6473 }
6474
6475 case Token::SAR:
6476 if (reversed) {
6477 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01006478 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006479 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006480 } else {
6481 // Only the least significant 5 bits of the shift value are used.
6482 // In the slow case, this masking is done inside the runtime call.
6483 int shift_value = int_value & 0x1f;
6484 operand->ToRegister();
6485 frame_->Spill(operand->reg());
6486 DeferredInlineSmiOperation* deferred =
6487 new DeferredInlineSmiOperation(op,
6488 operand->reg(),
6489 operand->reg(),
6490 smi_value,
6491 overwrite_mode);
6492 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6493 __ SmiShiftArithmeticRightConstant(operand->reg(),
6494 operand->reg(),
6495 shift_value);
6496 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006497 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006498 }
6499 break;
6500
6501 case Token::SHR:
6502 if (reversed) {
6503 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01006504 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006505 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006506 } else {
6507 // Only the least significant 5 bits of the shift value are used.
6508 // In the slow case, this masking is done inside the runtime call.
6509 int shift_value = int_value & 0x1f;
6510 operand->ToRegister();
Leon Clarked91b9f72010-01-27 17:25:45 +00006511 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00006512 ASSERT(answer.is_valid());
6513 DeferredInlineSmiOperation* deferred =
6514 new DeferredInlineSmiOperation(op,
6515 answer.reg(),
6516 operand->reg(),
6517 smi_value,
6518 overwrite_mode);
6519 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6520 __ SmiShiftLogicalRightConstant(answer.reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00006521 operand->reg(),
6522 shift_value,
6523 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00006524 deferred->BindExit();
6525 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00006526 }
6527 break;
6528
6529 case Token::SHL:
6530 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01006531 // Move operand into rcx and also into a second register.
6532 // If operand is already in a register, take advantage of that.
6533 // This lets us modify rcx, but still bail out to deferred code.
6534 Result right;
6535 Result right_copy_in_rcx;
6536 TypeInfo right_type_info = operand->type_info();
6537 operand->ToRegister();
6538 if (operand->reg().is(rcx)) {
6539 right = allocator()->Allocate();
6540 __ movq(right.reg(), rcx);
6541 frame_->Spill(rcx);
6542 right_copy_in_rcx = *operand;
6543 } else {
6544 right_copy_in_rcx = allocator()->Allocate(rcx);
6545 __ movq(rcx, operand->reg());
6546 right = *operand;
6547 }
6548 operand->Unuse();
6549
6550 answer = allocator()->Allocate();
6551 DeferredInlineSmiOperationReversed* deferred =
6552 new DeferredInlineSmiOperationReversed(op,
6553 answer.reg(),
6554 smi_value,
6555 right.reg(),
6556 overwrite_mode);
6557 __ movq(answer.reg(), Immediate(int_value));
6558 __ SmiToInteger32(rcx, rcx);
6559 if (!right_type_info.IsSmi()) {
6560 Condition is_smi = masm_->CheckSmi(right.reg());
6561 deferred->Branch(NegateCondition(is_smi));
6562 } else if (FLAG_debug_code) {
6563 __ AbortIfNotSmi(right.reg(),
6564 "Static type info claims non-smi is smi in (const SHL smi).");
6565 }
6566 __ shl_cl(answer.reg());
6567 __ Integer32ToSmi(answer.reg(), answer.reg());
6568
6569 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00006570 } else {
6571 // Only the least significant 5 bits of the shift value are used.
6572 // In the slow case, this masking is done inside the runtime call.
6573 int shift_value = int_value & 0x1f;
6574 operand->ToRegister();
6575 if (shift_value == 0) {
6576 // Spill operand so it can be overwritten in the slow case.
6577 frame_->Spill(operand->reg());
6578 DeferredInlineSmiOperation* deferred =
6579 new DeferredInlineSmiOperation(op,
6580 operand->reg(),
6581 operand->reg(),
6582 smi_value,
6583 overwrite_mode);
6584 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6585 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006586 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006587 } else {
6588 // Use a fresh temporary for nonzero shift values.
Leon Clarked91b9f72010-01-27 17:25:45 +00006589 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00006590 ASSERT(answer.is_valid());
6591 DeferredInlineSmiOperation* deferred =
6592 new DeferredInlineSmiOperation(op,
6593 answer.reg(),
6594 operand->reg(),
6595 smi_value,
6596 overwrite_mode);
6597 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6598 __ SmiShiftLeftConstant(answer.reg(),
6599 operand->reg(),
6600 shift_value,
6601 deferred->entry_label());
6602 deferred->BindExit();
6603 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00006604 }
6605 }
6606 break;
6607
6608 case Token::BIT_OR:
6609 case Token::BIT_XOR:
6610 case Token::BIT_AND: {
6611 operand->ToRegister();
6612 frame_->Spill(operand->reg());
6613 if (reversed) {
6614 // Bit operations with a constant smi are commutative.
6615 // We can swap left and right operands with no problem.
6616 // Swap left and right overwrite modes. 0->0, 1->2, 2->1.
6617 overwrite_mode = static_cast<OverwriteMode>((2 * overwrite_mode) % 3);
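        // Assuming the usual enum values NO_OVERWRITE = 0, OVERWRITE_LEFT = 1
        // and OVERWRITE_RIGHT = 2, (2 * mode) % 3 maps 0 -> 0, 1 -> 2 and
        // 2 -> 1, i.e. it swaps the left and right overwrite modes.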
6618 }
6619 DeferredCode* deferred = new DeferredInlineSmiOperation(op,
6620 operand->reg(),
6621 operand->reg(),
6622 smi_value,
6623 overwrite_mode);
6624 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6625 if (op == Token::BIT_AND) {
Steve Block3ce2e202009-11-05 08:53:23 +00006626 __ SmiAndConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006627 } else if (op == Token::BIT_XOR) {
6628 if (int_value != 0) {
Steve Block3ce2e202009-11-05 08:53:23 +00006629 __ SmiXorConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006630 }
6631 } else {
6632 ASSERT(op == Token::BIT_OR);
6633 if (int_value != 0) {
Steve Block3ce2e202009-11-05 08:53:23 +00006634 __ SmiOrConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006635 }
6636 }
6637 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006638 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006639 break;
6640 }
6641
6642 // Generate inline code for mod of powers of 2 and negative powers of 2.
6643 case Token::MOD:
6644 if (!reversed &&
6645 int_value != 0 &&
6646 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
6647 operand->ToRegister();
6648 frame_->Spill(operand->reg());
Steve Block3ce2e202009-11-05 08:53:23 +00006649 DeferredCode* deferred =
6650 new DeferredInlineSmiOperation(op,
6651 operand->reg(),
6652 operand->reg(),
6653 smi_value,
6654 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006655 // Check for negative or non-Smi left hand side.
6656 __ JumpIfNotPositiveSmi(operand->reg(), deferred->entry_label());
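      // For a non-negative smi x and a power of two p, x % p == x & (p - 1),
      // e.g. 13 % 8 == 13 & 7 == 5. This also lets the sign of a negative
      // divisor be ignored, since the result takes the sign of the dividend.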
6657 if (int_value < 0) int_value = -int_value;
6658 if (int_value == 1) {
Steve Block3ce2e202009-11-05 08:53:23 +00006659 __ Move(operand->reg(), Smi::FromInt(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00006660 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00006661 __ SmiAndConstant(operand->reg(),
6662 operand->reg(),
6663 Smi::FromInt(int_value - 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00006664 }
6665 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006666 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006667 break; // This break only applies if we generated code for MOD.
6668 }
6669 // Fall through if we did not find a power of 2 on the right hand side!
6670 // The next case must be the default.
6671
6672 default: {
6673 Result constant_operand(value);
6674 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01006675 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006676 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006677 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01006678 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006679 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006680 }
6681 break;
6682 }
6683 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006684 ASSERT(answer.is_valid());
6685 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006686}
6687
Steve Block6ded16b2010-05-10 14:33:55 +01006688Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
Leon Clarked91b9f72010-01-27 17:25:45 +00006689 Result* left,
6690 Result* right,
6691 OverwriteMode overwrite_mode) {
Steve Block6ded16b2010-05-10 14:33:55 +01006692 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00006693 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006694 // Special handling of div and mod because they use fixed registers.
6695 if (op == Token::DIV || op == Token::MOD) {
6696 // We need rax as the quotient register, rdx as the remainder
6697 // register, neither left nor right in rax or rdx, and left copied
6698 // to rax.
6699 Result quotient;
6700 Result remainder;
6701 bool left_is_in_rax = false;
6702 // Step 1: get rax for quotient.
6703 if ((left->is_register() && left->reg().is(rax)) ||
6704 (right->is_register() && right->reg().is(rax))) {
6705 // One or both is in rax. Use a fresh non-rdx register for
6706 // them.
6707 Result fresh = allocator_->Allocate();
6708 ASSERT(fresh.is_valid());
6709 if (fresh.reg().is(rdx)) {
6710 remainder = fresh;
6711 fresh = allocator_->Allocate();
6712 ASSERT(fresh.is_valid());
6713 }
6714 if (left->is_register() && left->reg().is(rax)) {
6715 quotient = *left;
6716 *left = fresh;
6717 left_is_in_rax = true;
6718 }
6719 if (right->is_register() && right->reg().is(rax)) {
6720 quotient = *right;
6721 *right = fresh;
6722 }
6723 __ movq(fresh.reg(), rax);
6724 } else {
6725 // Neither left nor right is in rax.
6726 quotient = allocator_->Allocate(rax);
6727 }
6728 ASSERT(quotient.is_register() && quotient.reg().is(rax));
6729 ASSERT(!(left->is_register() && left->reg().is(rax)));
6730 ASSERT(!(right->is_register() && right->reg().is(rax)));
6731
6732 // Step 2: get rdx for remainder if necessary.
6733 if (!remainder.is_valid()) {
6734 if ((left->is_register() && left->reg().is(rdx)) ||
6735 (right->is_register() && right->reg().is(rdx))) {
6736 Result fresh = allocator_->Allocate();
6737 ASSERT(fresh.is_valid());
6738 if (left->is_register() && left->reg().is(rdx)) {
6739 remainder = *left;
6740 *left = fresh;
6741 }
6742 if (right->is_register() && right->reg().is(rdx)) {
6743 remainder = *right;
6744 *right = fresh;
6745 }
6746 __ movq(fresh.reg(), rdx);
6747 } else {
6748 // Neither left nor right is in rdx.
6749 remainder = allocator_->Allocate(rdx);
6750 }
6751 }
6752 ASSERT(remainder.is_register() && remainder.reg().is(rdx));
6753 ASSERT(!(left->is_register() && left->reg().is(rdx)));
6754 ASSERT(!(right->is_register() && right->reg().is(rdx)));
6755
6756 left->ToRegister();
6757 right->ToRegister();
6758 frame_->Spill(rax);
6759 frame_->Spill(rdx);
6760
6761 // Check that left and right are smi tagged.
6762 DeferredInlineBinaryOperation* deferred =
6763 new DeferredInlineBinaryOperation(op,
6764 (op == Token::DIV) ? rax : rdx,
6765 left->reg(),
6766 right->reg(),
6767 overwrite_mode);
6768 __ JumpIfNotBothSmi(left->reg(), right->reg(), deferred->entry_label());
6769
6770 if (op == Token::DIV) {
6771 __ SmiDiv(rax, left->reg(), right->reg(), deferred->entry_label());
6772 deferred->BindExit();
6773 left->Unuse();
6774 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00006775 answer = quotient;
Steve Blocka7e24c12009-10-30 11:49:00 +00006776 } else {
6777 ASSERT(op == Token::MOD);
6778 __ SmiMod(rdx, left->reg(), right->reg(), deferred->entry_label());
6779 deferred->BindExit();
6780 left->Unuse();
6781 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00006782 answer = remainder;
Steve Blocka7e24c12009-10-30 11:49:00 +00006783 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006784 ASSERT(answer.is_valid());
6785 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006786 }
6787
6788 // Special handling of shift operations because they use fixed
6789 // registers.
6790 if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
6791 // Move left out of rcx if necessary.
6792 if (left->is_register() && left->reg().is(rcx)) {
6793 *left = allocator_->Allocate();
6794 ASSERT(left->is_valid());
6795 __ movq(left->reg(), rcx);
6796 }
6797 right->ToRegister(rcx);
6798 left->ToRegister();
6799 ASSERT(left->is_register() && !left->reg().is(rcx));
6800 ASSERT(right->is_register() && right->reg().is(rcx));
6801
6802 // We will modify right, it must be spilled.
6803 frame_->Spill(rcx);
6804
6805 // Use a fresh answer register to avoid spilling the left operand.
Leon Clarked91b9f72010-01-27 17:25:45 +00006806 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00006807 ASSERT(answer.is_valid());
6808 // Check that both operands are smis using the answer register as a
6809 // temporary.
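  // A smi has its tag bit clear while heap object pointers have it set, so
  // the bitwise OR of the two words is a smi exactly when both operands are
  // smis.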
6810 DeferredInlineBinaryOperation* deferred =
6811 new DeferredInlineBinaryOperation(op,
6812 answer.reg(),
6813 left->reg(),
6814 rcx,
6815 overwrite_mode);
6816 __ movq(answer.reg(), left->reg());
6817 __ or_(answer.reg(), rcx);
6818 __ JumpIfNotSmi(answer.reg(), deferred->entry_label());
6819
6820 // Perform the operation.
6821 switch (op) {
6822 case Token::SAR:
6823 __ SmiShiftArithmeticRight(answer.reg(), left->reg(), rcx);
6824 break;
6825 case Token::SHR: {
6826 __ SmiShiftLogicalRight(answer.reg(),
6827 left->reg(),
6828 rcx,
6829 deferred->entry_label());
6830 break;
6831 }
6832 case Token::SHL: {
6833 __ SmiShiftLeft(answer.reg(),
6834 left->reg(),
6835 rcx,
6836 deferred->entry_label());
6837 break;
6838 }
6839 default:
6840 UNREACHABLE();
6841 }
6842 deferred->BindExit();
6843 left->Unuse();
6844 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00006845 ASSERT(answer.is_valid());
6846 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006847 }
6848
6849 // Handle the other binary operations.
6850 left->ToRegister();
6851 right->ToRegister();
 6852 // A newly allocated register, answer, is used to hold the result. The
6853 // registers containing left and right are not modified so they don't
6854 // need to be spilled in the fast case.
Leon Clarked91b9f72010-01-27 17:25:45 +00006855 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00006856 ASSERT(answer.is_valid());
6857
6858 // Perform the smi tag check.
6859 DeferredInlineBinaryOperation* deferred =
6860 new DeferredInlineBinaryOperation(op,
6861 answer.reg(),
6862 left->reg(),
6863 right->reg(),
6864 overwrite_mode);
6865 __ JumpIfNotBothSmi(left->reg(), right->reg(), deferred->entry_label());
6866
6867 switch (op) {
6868 case Token::ADD:
6869 __ SmiAdd(answer.reg(),
6870 left->reg(),
6871 right->reg(),
6872 deferred->entry_label());
6873 break;
6874
6875 case Token::SUB:
6876 __ SmiSub(answer.reg(),
6877 left->reg(),
6878 right->reg(),
6879 deferred->entry_label());
6880 break;
6881
6882 case Token::MUL: {
6883 __ SmiMul(answer.reg(),
6884 left->reg(),
6885 right->reg(),
6886 deferred->entry_label());
6887 break;
6888 }
6889
6890 case Token::BIT_OR:
6891 __ SmiOr(answer.reg(), left->reg(), right->reg());
6892 break;
6893
6894 case Token::BIT_AND:
6895 __ SmiAnd(answer.reg(), left->reg(), right->reg());
6896 break;
6897
6898 case Token::BIT_XOR:
6899 __ SmiXor(answer.reg(), left->reg(), right->reg());
6900 break;
6901
6902 default:
6903 UNREACHABLE();
6904 break;
6905 }
6906 deferred->BindExit();
6907 left->Unuse();
6908 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00006909 ASSERT(answer.is_valid());
6910 return answer;
6911}
6912
6913
6914Result CodeGenerator::EmitKeyedLoad(bool is_global) {
6915 Comment cmnt(masm_, "[ Load from keyed Property");
6916 // Inline array load code if inside of a loop. We do not know
6917 // the receiver map yet, so we initially generate the code with
6918 // a check against an invalid map. In the inline cache code, we
6919 // patch the map check if appropriate.
6920 if (loop_nesting() > 0) {
6921 Comment cmnt(masm_, "[ Inlined load from keyed Property");
6922
6923 Result key = frame_->Pop();
6924 Result receiver = frame_->Pop();
6925 key.ToRegister();
6926 receiver.ToRegister();
6927
6928 // Use a fresh temporary to load the elements without destroying
 6929 // the receiver, which is needed for the deferred slow case.
6930 Result elements = allocator()->Allocate();
6931 ASSERT(elements.is_valid());
6932
6933 // Use a fresh temporary for the index and later the loaded
6934 // value.
6935 Result index = allocator()->Allocate();
6936 ASSERT(index.is_valid());
6937
6938 DeferredReferenceGetKeyedValue* deferred =
6939 new DeferredReferenceGetKeyedValue(index.reg(),
6940 receiver.reg(),
6941 key.reg(),
6942 is_global);
6943
6944 // Check that the receiver is not a smi (only needed if this
6945 // is not a load from the global context) and that it has the
6946 // expected map.
6947 if (!is_global) {
6948 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
6949 }
6950
6951 // Initially, use an invalid map. The map is patched in the IC
6952 // initialization code.
6953 __ bind(deferred->patch_site());
6954 // Use masm-> here instead of the double underscore macro since extra
6955 // coverage code can interfere with the patching. Do not use
6956 // root array to load null_value, since it must be patched with
6957 // the expected receiver map.
6958 masm_->movq(kScratchRegister, Factory::null_value(),
6959 RelocInfo::EMBEDDED_OBJECT);
6960 masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6961 kScratchRegister);
6962 deferred->Branch(not_equal);
6963
6964 // Check that the key is a non-negative smi.
6965 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label());
6966
6967 // Get the elements array from the receiver and check that it
6968 // is not a dictionary.
6969 __ movq(elements.reg(),
6970 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6971 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
6972 Factory::fixed_array_map());
6973 deferred->Branch(not_equal);
6974
6975 // Shift the key to get the actual index value and check that
6976 // it is within bounds.
6977 __ SmiToInteger32(index.reg(), key.reg());
6978 __ cmpl(index.reg(),
6979 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
6980 deferred->Branch(above_equal);
6981
6982 // The index register holds the un-smi-tagged key. It has been
6983 // zero-extended to 64-bits, so it can be used directly as index in the
6984 // operand below.
6985 // Load and check that the result is not the hole. We could
6986 // reuse the index or elements register for the value.
6987 //
6988 // TODO(206): Consider whether it makes sense to try some
6989 // heuristic about which register to reuse. For example, if
 6990 // one is rax, then we can reuse that one because the value
6991 // coming from the deferred code will be in rax.
6992 Result value = index;
6993 __ movq(value.reg(),
6994 Operand(elements.reg(),
6995 index.reg(),
6996 times_pointer_size,
6997 FixedArray::kHeaderSize - kHeapObjectTag));
6998 elements.Unuse();
6999 index.Unuse();
7000 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
7001 deferred->Branch(equal);
7002 __ IncrementCounter(&Counters::keyed_load_inline, 1);
7003
7004 deferred->BindExit();
7005 // Restore the receiver and key to the frame and push the
7006 // result on top of it.
7007 frame_->Push(&receiver);
7008 frame_->Push(&key);
7009 return value;
7010
7011 } else {
7012 Comment cmnt(masm_, "[ Load from keyed Property");
7013 RelocInfo::Mode mode = is_global
7014 ? RelocInfo::CODE_TARGET_CONTEXT
7015 : RelocInfo::CODE_TARGET;
7016 Result answer = frame_->CallKeyedLoadIC(mode);
7017 // Make sure that we do not have a test instruction after the
7018 // call. A test instruction after the call is used to
7019 // indicate that we have generated an inline version of the
7020 // keyed load. The explicit nop instruction is here because
7021 // the push that follows might be peep-hole optimized away.
7022 __ nop();
7023 return answer;
7024 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007025}
7026
7027
7028#undef __
7029#define __ ACCESS_MASM(masm)
7030
7031
7032Handle<String> Reference::GetName() {
7033 ASSERT(type_ == NAMED);
7034 Property* property = expression_->AsProperty();
7035 if (property == NULL) {
7036 // Global variable reference treated as a named property reference.
7037 VariableProxy* proxy = expression_->AsVariableProxy();
7038 ASSERT(proxy->AsVariable() != NULL);
7039 ASSERT(proxy->AsVariable()->is_global());
7040 return proxy->name();
7041 } else {
7042 Literal* raw_name = property->key()->AsLiteral();
7043 ASSERT(raw_name != NULL);
7044 return Handle<String>(String::cast(*raw_name->handle()));
7045 }
7046}
7047
7048
Steve Blockd0582a62009-12-15 09:54:21 +00007049void Reference::GetValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00007050 ASSERT(!cgen_->in_spilled_code());
7051 ASSERT(cgen_->HasValidEntryRegisters());
7052 ASSERT(!is_illegal());
7053 MacroAssembler* masm = cgen_->masm();
7054
7055 // Record the source position for the property load.
7056 Property* property = expression_->AsProperty();
7057 if (property != NULL) {
7058 cgen_->CodeForSourcePosition(property->position());
7059 }
7060
7061 switch (type_) {
7062 case SLOT: {
7063 Comment cmnt(masm, "[ Load from Slot");
7064 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
7065 ASSERT(slot != NULL);
Steve Blockd0582a62009-12-15 09:54:21 +00007066 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00007067 break;
7068 }
7069
7070 case NAMED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00007071 Variable* var = expression_->AsVariableProxy()->AsVariable();
7072 bool is_global = var != NULL;
7073 ASSERT(!is_global || var->is_global());
7074
7075 // Do not inline the inobject property case for loads from the global
7076 // object. Also do not inline for unoptimized code. This saves time
7077 // in the code generator. Unoptimized code is toplevel code or code
7078 // that is not in a loop.
7079 if (is_global ||
7080 cgen_->scope()->is_global_scope() ||
7081 cgen_->loop_nesting() == 0) {
7082 Comment cmnt(masm, "[ Load from named Property");
7083 cgen_->frame()->Push(GetName());
7084
7085 RelocInfo::Mode mode = is_global
7086 ? RelocInfo::CODE_TARGET_CONTEXT
7087 : RelocInfo::CODE_TARGET;
7088 Result answer = cgen_->frame()->CallLoadIC(mode);
7089 // A test rax instruction following the call signals that the
7090 // inobject property case was inlined. Ensure that there is not
7091 // a test rax instruction here.
7092 __ nop();
7093 cgen_->frame()->Push(&answer);
7094 } else {
7095 // Inline the inobject property case.
7096 Comment cmnt(masm, "[ Inlined named property load");
7097 Result receiver = cgen_->frame()->Pop();
7098 receiver.ToRegister();
7099 Result value = cgen_->allocator()->Allocate();
7100 ASSERT(value.is_valid());
7101 // Cannot use r12 for receiver, because that changes
7102 // the distance between a call and a fixup location,
7103 // due to a special encoding of r12 as r/m in a ModR/M byte.
7104 if (receiver.reg().is(r12)) {
7105 // Swap receiver and value.
7106 __ movq(value.reg(), receiver.reg());
7107 Result temp = receiver;
7108 receiver = value;
7109 value = temp;
7110 cgen_->frame()->Spill(value.reg()); // r12 may have been shared.
7111 }
7112
7113 DeferredReferenceGetNamedValue* deferred =
7114 new DeferredReferenceGetNamedValue(value.reg(),
7115 receiver.reg(),
7116 GetName());
7117
7118 // Check that the receiver is a heap object.
7119 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
7120
7121 __ bind(deferred->patch_site());
7122 // This is the map check instruction that will be patched (so we can't
7123 // use the double underscore macro that may insert instructions).
7124 // Initially use an invalid map to force a failure.
7125 masm->Move(kScratchRegister, Factory::null_value());
7126 masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
7127 kScratchRegister);
7128 // This branch is always a forwards branch so it's always a fixed
7129 // size which allows the assert below to succeed and patching to work.
7130 // Don't use deferred->Branch(...), since that might add coverage code.
7131 masm->j(not_equal, deferred->entry_label());
7132
7133 // The delta from the patch label to the load offset must be
7134 // statically known.
7135 ASSERT(masm->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
7136 LoadIC::kOffsetToLoadInstruction);
7137 // The initial (invalid) offset has to be large enough to force
7138 // a 32-bit instruction encoding to allow patching with an
7139 // arbitrary offset. Use kMaxInt (minus kHeapObjectTag).
7140 int offset = kMaxInt;
7141 masm->movq(value.reg(), FieldOperand(receiver.reg(), offset));
7142
7143 __ IncrementCounter(&Counters::named_load_inline, 1);
7144 deferred->BindExit();
7145 cgen_->frame()->Push(&receiver);
7146 cgen_->frame()->Push(&value);
7147 }
7148 break;
7149 }
7150
7151 case KEYED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00007152 Comment cmnt(masm, "[ Load from keyed Property");
7153 Variable* var = expression_->AsVariableProxy()->AsVariable();
7154 bool is_global = var != NULL;
7155 ASSERT(!is_global || var->is_global());
7156
Leon Clarked91b9f72010-01-27 17:25:45 +00007157 Result value = cgen_->EmitKeyedLoad(is_global);
7158 cgen_->frame()->Push(&value);
Steve Blocka7e24c12009-10-30 11:49:00 +00007159 break;
7160 }
7161
7162 default:
7163 UNREACHABLE();
7164 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007165
7166 if (!persist_after_get_) {
7167 cgen_->UnloadReference(this);
7168 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007169}
7170
7171
Steve Blockd0582a62009-12-15 09:54:21 +00007172void Reference::TakeValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00007173 // TODO(X64): This function is completely architecture independent. Move
7174 // it somewhere shared.
7175
7176 // For non-constant frame-allocated slots, we invalidate the value in the
7177 // slot. For all others, we fall back on GetValue.
7178 ASSERT(!cgen_->in_spilled_code());
7179 ASSERT(!is_illegal());
7180 if (type_ != SLOT) {
Steve Blockd0582a62009-12-15 09:54:21 +00007181 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00007182 return;
7183 }
7184
7185 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
7186 ASSERT(slot != NULL);
7187 if (slot->type() == Slot::LOOKUP ||
7188 slot->type() == Slot::CONTEXT ||
7189 slot->var()->mode() == Variable::CONST ||
7190 slot->is_arguments()) {
Steve Blockd0582a62009-12-15 09:54:21 +00007191 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00007192 return;
7193 }
7194
7195 // Only non-constant, frame-allocated parameters and locals can reach
7196 // here. Be careful not to use the optimizations for arguments
7197 // object access since it may not have been initialized yet.
7198 ASSERT(!slot->is_arguments());
7199 if (slot->type() == Slot::PARAMETER) {
7200 cgen_->frame()->TakeParameterAt(slot->index());
7201 } else {
7202 ASSERT(slot->type() == Slot::LOCAL);
7203 cgen_->frame()->TakeLocalAt(slot->index());
7204 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007205
7206 ASSERT(persist_after_get_);
7207 // Do not unload the reference, because it is used in SetValue.
Steve Blocka7e24c12009-10-30 11:49:00 +00007208}
7209
7210
7211void Reference::SetValue(InitState init_state) {
7212 ASSERT(cgen_->HasValidEntryRegisters());
7213 ASSERT(!is_illegal());
7214 MacroAssembler* masm = cgen_->masm();
7215 switch (type_) {
7216 case SLOT: {
7217 Comment cmnt(masm, "[ Store to Slot");
7218 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
7219 ASSERT(slot != NULL);
7220 cgen_->StoreToSlot(slot, init_state);
Leon Clarke4515c472010-02-03 11:58:03 +00007221 cgen_->UnloadReference(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00007222 break;
7223 }
7224
7225 case NAMED: {
7226 Comment cmnt(masm, "[ Store to named Property");
7227 cgen_->frame()->Push(GetName());
7228 Result answer = cgen_->frame()->CallStoreIC();
7229 cgen_->frame()->Push(&answer);
Leon Clarke4515c472010-02-03 11:58:03 +00007230 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007231 break;
7232 }
7233
7234 case KEYED: {
7235 Comment cmnt(masm, "[ Store to keyed Property");
7236
7237 // Generate inlined version of the keyed store if the code is in
7238 // a loop and the key is likely to be a smi.
7239 Property* property = expression()->AsProperty();
7240 ASSERT(property != NULL);
Leon Clarkee46be812010-01-19 14:06:41 +00007241 StaticType* key_smi_analysis = property->key()->type();
Steve Blocka7e24c12009-10-30 11:49:00 +00007242
7243 if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
7244 Comment cmnt(masm, "[ Inlined store to keyed Property");
7245
7246 // Get the receiver, key and value into registers.
7247 Result value = cgen_->frame()->Pop();
7248 Result key = cgen_->frame()->Pop();
7249 Result receiver = cgen_->frame()->Pop();
7250
7251 Result tmp = cgen_->allocator_->Allocate();
7252 ASSERT(tmp.is_valid());
Steve Block6ded16b2010-05-10 14:33:55 +01007253 Result tmp2 = cgen_->allocator_->Allocate();
7254 ASSERT(tmp2.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00007255
7256 // Determine whether the value is a constant before putting it
7257 // in a register.
7258 bool value_is_constant = value.is_constant();
7259
7260 // Make sure that value, key and receiver are in registers.
7261 value.ToRegister();
7262 key.ToRegister();
7263 receiver.ToRegister();
7264
7265 DeferredReferenceSetKeyedValue* deferred =
7266 new DeferredReferenceSetKeyedValue(value.reg(),
7267 key.reg(),
7268 receiver.reg());
7269
Steve Blocka7e24c12009-10-30 11:49:00 +00007270 // Check that the receiver is not a smi.
7271 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
7272
Steve Block6ded16b2010-05-10 14:33:55 +01007273 // Check that the key is a smi.
7274 if (!key.is_smi()) {
7275 __ JumpIfNotSmi(key.reg(), deferred->entry_label());
7276 } else if (FLAG_debug_code) {
7277 __ AbortIfNotSmi(key.reg(), "Non-smi value in smi-typed value.");
7278 }
7279
Steve Blocka7e24c12009-10-30 11:49:00 +00007280 // Check that the receiver is a JSArray.
7281 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
7282 deferred->Branch(not_equal);
7283
7284 // Check that the key is within bounds. Both the key and the
Steve Block6ded16b2010-05-10 14:33:55 +01007285 // length of the JSArray are smis. Use unsigned comparison to handle
7286 // negative keys.
Steve Block3ce2e202009-11-05 08:53:23 +00007287 __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
7288 key.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01007289 deferred->Branch(below_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00007290
7291 // Get the elements array from the receiver and check that it
7292 // is a flat array (not a dictionary).
7293 __ movq(tmp.reg(),
7294 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007295
7296 // Check whether it is possible to omit the write barrier. If the
7297 // elements array is in new space or the value written is a smi we can
7298 // safely update the elements array without updating the remembered set.
7299 Label in_new_space;
7300 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
7301 if (!value_is_constant) {
7302 __ JumpIfNotSmi(value.reg(), deferred->entry_label());
7303 }
7304
7305 __ bind(&in_new_space);
Steve Blocka7e24c12009-10-30 11:49:00 +00007306 // Bind the deferred code patch site to be able to locate the
7307 // fixed array map comparison. When debugging, we patch this
7308 // comparison to always fail so that we will hit the IC call
7309 // in the deferred code which will allow the debugger to
7310 // break for fast case stores.
7311 __ bind(deferred->patch_site());
7312 // Avoid using __ to ensure the distance from patch_site
7313 // to the map address is always the same.
7314 masm->movq(kScratchRegister, Factory::fixed_array_map(),
7315 RelocInfo::EMBEDDED_OBJECT);
7316 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
7317 kScratchRegister);
7318 deferred->Branch(not_equal);
7319
7320 // Store the value.
7321 SmiIndex index =
7322 masm->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
7323 __ movq(Operand(tmp.reg(),
7324 index.reg,
7325 index.scale,
7326 FixedArray::kHeaderSize - kHeapObjectTag),
7327 value.reg());
7328 __ IncrementCounter(&Counters::keyed_store_inline, 1);
7329
7330 deferred->BindExit();
7331
7332 cgen_->frame()->Push(&receiver);
7333 cgen_->frame()->Push(&key);
7334 cgen_->frame()->Push(&value);
7335 } else {
7336 Result answer = cgen_->frame()->CallKeyedStoreIC();
7337 // Make sure that we do not have a test instruction after the
7338 // call. A test instruction after the call is used to
7339 // indicate that we have generated an inline version of the
7340 // keyed store.
7341 masm->nop();
7342 cgen_->frame()->Push(&answer);
7343 }
Leon Clarke4515c472010-02-03 11:58:03 +00007344 cgen_->UnloadReference(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00007345 break;
7346 }
7347
7348 default:
7349 UNREACHABLE();
7350 }
7351}
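// Illustrative sketch (not part of V8): the bounds check in the inlined keyed
// store above is a single unsigned comparison, which rejects negative keys as
// well as keys beyond the array length. Shown here on untagged values; the
// generated code compares the smi-tagged key and length directly.
static inline bool KeyIsInBounds(int32_t key, int32_t length) {
  return static_cast<uint32_t>(key) < static_cast<uint32_t>(length);
}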
7352
7353
Leon Clarkee46be812010-01-19 14:06:41 +00007354void FastNewClosureStub::Generate(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +01007355 // Create a new closure from the given function info in new
7356 // space. Set the context to the current context in rsi.
Leon Clarkee46be812010-01-19 14:06:41 +00007357 Label gc;
7358 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
7359
Steve Block6ded16b2010-05-10 14:33:55 +01007360 // Get the function info from the stack.
Leon Clarkee46be812010-01-19 14:06:41 +00007361 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
7362
7363 // Compute the function map in the current global context and set that
7364 // as the map of the allocated object.
7365 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
7366 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
7367 __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
7368 __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
7369
Steve Block6ded16b2010-05-10 14:33:55 +01007370 // Initialize the rest of the function. We don't have to update the
7371 // write barrier because the allocated object is in new space.
7372 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
7373 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
7374 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
7375 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
7376 __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
7377 __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
7378 __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
7379 __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
Leon Clarkee46be812010-01-19 14:06:41 +00007380
7381 // Return and remove the on-stack parameter.
7382 __ ret(1 * kPointerSize);
7383
7384 // Create a new closure through the slower runtime call.
7385 __ bind(&gc);
7386 __ pop(rcx); // Temporarily remove return address.
7387 __ pop(rdx);
7388 __ push(rsi);
7389 __ push(rdx);
7390 __ push(rcx); // Restore return address.
Steve Block6ded16b2010-05-10 14:33:55 +01007391 __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00007392}
7393
7394
7395void FastNewContextStub::Generate(MacroAssembler* masm) {
7396 // Try to allocate the context in new space.
7397 Label gc;
7398 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
7399 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
7400 rax, rbx, rcx, &gc, TAG_OBJECT);
7401
7402 // Get the function from the stack.
7403 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
7404
7405 // Setup the object header.
7406 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
7407 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
7408 __ movl(FieldOperand(rax, Array::kLengthOffset), Immediate(length));
7409
7410 // Setup the fixed slots.
7411 __ xor_(rbx, rbx); // Set to NULL.
7412 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
7413 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
7414 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
7415 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
7416
7417 // Copy the global object from the surrounding context.
7418 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
7419 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
7420
7421 // Initialize the rest of the slots to undefined.
7422 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
7423 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
7424 __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
7425 }
7426
7427 // Return and remove the on-stack parameter.
7428 __ movq(rsi, rax);
7429 __ ret(1 * kPointerSize);
7430
7431 // Need to collect. Call into runtime system.
7432 __ bind(&gc);
Steve Block6ded16b2010-05-10 14:33:55 +01007433 __ TailCallRuntime(Runtime::kNewContext, 1, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00007434}
7435
7436
Andrei Popescu402d9372010-02-26 13:31:12 +00007437void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
7438 // Stack layout on entry:
7439 //
7440 // [rsp + kPointerSize]: constant elements.
7441 // [rsp + (2 * kPointerSize)]: literal index.
7442 // [rsp + (3 * kPointerSize)]: literals array.
7443
7444 // All sizes here are multiples of kPointerSize.
7445 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
7446 int size = JSArray::kSize + elements_size;
7447
7448 // Load boilerplate object into rcx and check if we need to create a
7449 // boilerplate.
7450 Label slow_case;
7451 __ movq(rcx, Operand(rsp, 3 * kPointerSize));
7452 __ movq(rax, Operand(rsp, 2 * kPointerSize));
7453 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
7454 __ movq(rcx,
7455 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
7456 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
7457 __ j(equal, &slow_case);
7458
7459 // Allocate both the JS array and the elements array in one big
7460 // allocation. This avoids multiple limit checks.
7461 __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
7462
7463 // Copy the JS array part.
7464 for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
7465 if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
7466 __ movq(rbx, FieldOperand(rcx, i));
7467 __ movq(FieldOperand(rax, i), rbx);
7468 }
7469 }
7470
7471 if (length_ > 0) {
7472 // Get hold of the elements array of the boilerplate and setup the
7473 // elements pointer in the resulting object.
7474 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
7475 __ lea(rdx, Operand(rax, JSArray::kSize));
7476 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
7477
7478 // Copy the elements array.
7479 for (int i = 0; i < elements_size; i += kPointerSize) {
7480 __ movq(rbx, FieldOperand(rcx, i));
7481 __ movq(FieldOperand(rdx, i), rbx);
7482 }
7483 }
7484
7485 // Return and remove the on-stack parameters.
7486 __ ret(3 * kPointerSize);
7487
7488 __ bind(&slow_case);
Steve Block6ded16b2010-05-10 14:33:55 +01007489 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00007490}
7491
7492
Steve Blocka7e24c12009-10-30 11:49:00 +00007493void ToBooleanStub::Generate(MacroAssembler* masm) {
7494 Label false_result, true_result, not_string;
7495 __ movq(rax, Operand(rsp, 1 * kPointerSize));
7496
7497 // 'null' => false.
7498 __ CompareRoot(rax, Heap::kNullValueRootIndex);
7499 __ j(equal, &false_result);
7500
7501 // Get the map and type of the heap object.
7502 // We don't use CmpObjectType because we manipulate the type field.
7503 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
7504 __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
7505
7506 // Undetectable => false.
7507 __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
7508 __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
7509 __ j(not_zero, &false_result);
7510
7511 // JavaScript object => true.
7512 __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
7513 __ j(above_equal, &true_result);
7514
7515 // String value => false iff empty.
7516 __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
7517 __ j(above_equal, &not_string);
Steve Block6ded16b2010-05-10 14:33:55 +01007518 __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
7519 __ SmiTest(rdx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007520 __ j(zero, &false_result);
7521 __ jmp(&true_result);
7522
7523 __ bind(&not_string);
7524 // HeapNumber => false iff +0, -0, or NaN.
7525 // These three cases set C3 when compared to zero in the FPU.
7526 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
7527 __ j(not_equal, &true_result);
Steve Blocka7e24c12009-10-30 11:49:00 +00007528 __ fldz(); // Load zero onto fp stack
7529 // Load heap-number double value onto fp stack
7530 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00007531 __ FCmp();
7532 __ j(zero, &false_result);
Steve Blocka7e24c12009-10-30 11:49:00 +00007533 // Fall through to |true_result|.
7534
7535 // Return 1/0 for true/false in rax.
7536 __ bind(&true_result);
7537 __ movq(rax, Immediate(1));
7538 __ ret(1 * kPointerSize);
7539 __ bind(&false_result);
7540 __ xor_(rax, rax);
7541 __ ret(1 * kPointerSize);
7542}
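// Illustrative sketch (not part of V8): the heap-number branch above
// implements the ECMAScript ToBoolean rule for numbers, which in plain C++
// amounts to:
static inline bool NumberToBoolean(double value) {
  // false for +0, -0 and NaN; true for every other number.
  return value == value && value != 0.0;
}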
7543
7544
7545bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007546 Object* answer_object = Heap::undefined_value();
7547 switch (op) {
7548 case Token::ADD:
Leon Clarked91b9f72010-01-27 17:25:45 +00007549 // Use intptr_t to detect overflow of 32-bit int.
7550 if (Smi::IsValid(static_cast<intptr_t>(left) + right)) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007551 answer_object = Smi::FromInt(left + right);
7552 }
7553 break;
7554 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00007555 // Use intptr_t to detect overflow of 32-bit int.
7556 if (Smi::IsValid(static_cast<intptr_t>(left) - right)) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007557 answer_object = Smi::FromInt(left - right);
7558 }
7559 break;
7560 case Token::MUL: {
7561 double answer = static_cast<double>(left) * right;
7562 if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
7563 // If the product is zero and the non-zero factor is negative,
7564 // the spec requires us to return floating point negative zero.
7565 if (answer != 0 || (left + right) >= 0) {
7566 answer_object = Smi::FromInt(static_cast<int>(answer));
7567 }
7568 }
7569 }
7570 break;
7571 case Token::DIV:
7572 case Token::MOD:
7573 break;
7574 case Token::BIT_OR:
7575 answer_object = Smi::FromInt(left | right);
7576 break;
7577 case Token::BIT_AND:
7578 answer_object = Smi::FromInt(left & right);
7579 break;
7580 case Token::BIT_XOR:
7581 answer_object = Smi::FromInt(left ^ right);
7582 break;
7583
7584 case Token::SHL: {
7585 int shift_amount = right & 0x1F;
7586 if (Smi::IsValid(left << shift_amount)) {
7587 answer_object = Smi::FromInt(left << shift_amount);
7588 }
7589 break;
7590 }
7591 case Token::SHR: {
7592 int shift_amount = right & 0x1F;
7593 unsigned int unsigned_left = left;
7594 unsigned_left >>= shift_amount;
7595 if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
7596 answer_object = Smi::FromInt(unsigned_left);
7597 }
7598 break;
7599 }
7600 case Token::SAR: {
7601 int shift_amount = right & 0x1F;
7602 unsigned int unsigned_left = left;
7603 if (left < 0) {
7604 // Perform arithmetic shift of a negative number by
7605 // complementing number, logical shifting, complementing again.
7606 unsigned_left = ~unsigned_left;
7607 unsigned_left >>= shift_amount;
7608 unsigned_left = ~unsigned_left;
7609 } else {
7610 unsigned_left >>= shift_amount;
7611 }
7612 ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
7613 answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
7614 break;
7615 }
7616 default:
7617 UNREACHABLE();
7618 break;
7619 }
7620 if (answer_object == Heap::undefined_value()) {
7621 return false;
7622 }
7623 frame_->Push(Handle<Object>(answer_object));
7624 return true;
7625}
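// Illustrative sketch (not part of V8): the ADD and SUB cases above fold the
// constants only when the exact result is still a valid smi, using a 64-bit
// intermediate so 32-bit overflow stays visible. A hypothetical standalone
// version of that check:
static inline bool FoldSmiAdd(int left, int right, int* result) {
  int64_t wide = static_cast<int64_t>(left) + right;  // Cannot overflow.
  if (wide < Smi::kMinValue || wide > Smi::kMaxValue) return false;
  *result = static_cast<int>(wide);
  return true;
}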
7626
7627
7628// End of CodeGenerator implementation.
7629
Steve Block6ded16b2010-05-10 14:33:55 +01007630void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
7631 // Input on stack:
7632 // rsp[8]: argument (should be number).
7633 // rsp[0]: return address.
7634 Label runtime_call;
7635 Label runtime_call_clear_stack;
7636 Label input_not_smi;
7637 Label loaded;
7638 // Test that rax is a number.
7639 __ movq(rax, Operand(rsp, kPointerSize));
7640 __ JumpIfNotSmi(rax, &input_not_smi);
7641 // Input is a smi. Untag and load it onto the FPU stack.
7642 // Then load the bits of the double into rbx.
7643 __ SmiToInteger32(rax, rax);
7644 __ subq(rsp, Immediate(kPointerSize));
7645 __ cvtlsi2sd(xmm1, rax);
7646 __ movsd(Operand(rsp, 0), xmm1);
7647 __ movq(rbx, xmm1);
7648 __ movq(rdx, xmm1);
7649 __ fld_d(Operand(rsp, 0));
7650 __ addq(rsp, Immediate(kPointerSize));
7651 __ jmp(&loaded);
7652
7653 __ bind(&input_not_smi);
7654 // Check if input is a HeapNumber.
7655 __ Move(rbx, Factory::heap_number_map());
7656 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
7657 __ j(not_equal, &runtime_call);
7658 // Input is a HeapNumber. Push it on the FPU stack and load its
7659 // bits into rbx.
7660 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
7661 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
7662 __ movq(rdx, rbx);
7663 __ bind(&loaded);
7664 // ST[0] == double value
7665 // rbx = bits of double value.
7666 // rdx = also bits of double value.
7667 // Compute hash (h is 32 bits, bits are 64):
7668 // h = h0 = bits ^ (bits >> 32);
7669 // h ^= h >> 16;
7670 // h ^= h >> 8;
7671 // h = h & (cacheSize - 1);
7672 // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
7673 __ sar(rdx, Immediate(32));
7674 __ xorl(rdx, rbx);
7675 __ movl(rcx, rdx);
7676 __ movl(rax, rdx);
7677 __ movl(rdi, rdx);
7678 __ sarl(rdx, Immediate(8));
7679 __ sarl(rcx, Immediate(16));
7680 __ sarl(rax, Immediate(24));
7681 __ xorl(rcx, rdx);
7682 __ xorl(rax, rdi);
7683 __ xorl(rcx, rax);
7684 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
7685 __ andl(rcx, Immediate(TranscendentalCache::kCacheSize - 1));
7686 // ST[0] == double value.
7687 // rbx = bits of double value.
7688 // rcx = TranscendentalCache::hash(double value).
7689 __ movq(rax, ExternalReference::transcendental_cache_array_address());
7690 // rax points to cache array.
7691 __ movq(rax, Operand(rax, type_ * sizeof(TranscendentalCache::caches_[0])));
7692 // rax points to the cache for the type type_.
7693 // If NULL, the cache hasn't been initialized yet, so go through runtime.
7694 __ testq(rax, rax);
7695 __ j(zero, &runtime_call_clear_stack);
7696#ifdef DEBUG
7697 // Check that the layout of cache elements match expectations.
7698 { // NOLINT - doesn't like a single brace on a line.
7699 TranscendentalCache::Element test_elem[2];
7700 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
7701 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
7702 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
7703 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
7704 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
7705 // Two uint_32's and a pointer per element.
7706 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
7707 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
7708 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
7709 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
7710 }
7711#endif
7712 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
7713 __ addl(rcx, rcx);
7714 __ lea(rcx, Operand(rax, rcx, times_8, 0));
7715 // Check if cache matches: Double value is stored in uint32_t[2] array.
7716 Label cache_miss;
7717 __ cmpq(rbx, Operand(rcx, 0));
7718 __ j(not_equal, &cache_miss);
7719 // Cache hit!
7720 __ movq(rax, Operand(rcx, 2 * kIntSize));
7721 __ fstp(0); // Clear FPU stack.
7722 __ ret(kPointerSize);
7723
7724 __ bind(&cache_miss);
7725 // Update cache with new value.
7726 Label nan_result;
7727 GenerateOperation(masm, &nan_result);
7728 __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
7729 __ movq(Operand(rcx, 0), rbx);
7730 __ movq(Operand(rcx, 2 * kIntSize), rax);
7731 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
7732 __ ret(kPointerSize);
7733
7734 __ bind(&runtime_call_clear_stack);
7735 __ fstp(0);
7736 __ bind(&runtime_call);
7737 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
7738
7739 __ bind(&nan_result);
7740 __ fstp(0); // Remove argument from FPU stack.
7741 __ LoadRoot(rax, Heap::kNanValueRootIndex);
7742 __ movq(Operand(rcx, 0), rbx);
7743 __ movq(Operand(rcx, 2 * kIntSize), rax);
7744 __ ret(kPointerSize);
7745}
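// Illustrative sketch (not part of V8): the cache hash computed into rcx
// above, written over the raw bits of the double exactly as in the comment.
// This mirrors the hashing scheme; the generated code uses arithmetic shifts,
// so the untruncated intermediate values are not guaranteed to be identical.
static inline uint32_t TranscendentalCacheHash(uint64_t bits) {
  uint32_t h = static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
  h ^= h >> 16;
  h ^= h >> 8;
  return h & (TranscendentalCache::kCacheSize - 1);
}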
7746
7747
7748Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
7749 switch (type_) {
7750 // Add more cases when necessary.
7751 case TranscendentalCache::SIN: return Runtime::kMath_sin;
7752 case TranscendentalCache::COS: return Runtime::kMath_cos;
7753 default:
7754 UNIMPLEMENTED();
7755 return Runtime::kAbort;
7756 }
7757}
7758
7759
7760void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
7761 Label* on_nan_result) {
7762 // Registers:
7763 // rbx: Bits of input double. Must be preserved.
7764 // rcx: Pointer to cache entry. Must be preserved.
7765 // st(0): Input double
7766 Label done;
7767 ASSERT(type_ == TranscendentalCache::SIN ||
7768 type_ == TranscendentalCache::COS);
7769 // More transcendental types can be added later.
7770
7771 // Both fsin and fcos require arguments in the range +/-2^63 and
7772 // return NaN for infinities and NaN. They can share all code except
7773 // the actual fsin/fcos operation.
7774 Label in_range;
7775 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
7776 // work. We must reduce it to the appropriate range.
7777 __ movq(rdi, rbx);
7778 // Move exponent and sign bits to low bits.
7779 __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
7780 // Remove sign bit.
7781 __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
7782 int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
7783 __ cmpl(rdi, Immediate(supported_exponent_limit));
7784 __ j(below, &in_range);
7785 // Check for infinity and NaN. Both return NaN for sin.
7786 __ cmpl(rdi, Immediate(0x7ff));
7787 __ j(equal, on_nan_result);
7788
7789 // Use fpmod to restrict argument to the range +/-2*PI.
7790 __ fldpi();
7791 __ fadd(0);
7792 __ fld(1);
7793 // FPU Stack: input, 2*pi, input.
7794 {
7795 Label no_exceptions;
7796 __ fwait();
7797 __ fnstsw_ax();
7798 // Clear if Illegal Operand or Zero Division exceptions are set.
7799 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word.
7800 __ j(zero, &no_exceptions);
7801 __ fnclex();
7802 __ bind(&no_exceptions);
7803 }
7804
7805 // Compute st(0) % st(1)
7806 {
7807 Label partial_remainder_loop;
7808 __ bind(&partial_remainder_loop);
7809 __ fprem1();
7810 __ fwait();
7811 __ fnstsw_ax();
7812 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word.
7813 // If C2 is set, computation only has partial result. Loop to
7814 // continue computation.
7815 __ j(not_zero, &partial_remainder_loop);
7816 }
7817 // FPU Stack: input, 2*pi, input % 2*pi
7818 __ fstp(2);
7819 // FPU Stack: input % 2*pi, 2*pi,
7820 __ fstp(0);
7821 // FPU Stack: input % 2*pi
7822 __ bind(&in_range);
7823 switch (type_) {
7824 case TranscendentalCache::SIN:
7825 __ fsin();
7826 break;
7827 case TranscendentalCache::COS:
7828 __ fcos();
7829 break;
7830 default:
7831 UNREACHABLE();
7832 }
7833 __ bind(&done);
7834}
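// Illustrative sketch (not part of V8): the argument range check above on the
// raw bits of the double. fsin/fcos only handle arguments whose magnitude is
// below 2^63, so the biased exponent must be below 63 + kExponentBias.
static inline bool InFsinFcosRange(uint64_t double_bits) {
  int biased_exponent =
      static_cast<int>((double_bits >> HeapNumber::kMantissaBits) & 0x7FF);
  return biased_exponent < 63 + HeapNumber::kExponentBias;
}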
7835
7836
Leon Clarked91b9f72010-01-27 17:25:45 +00007837// Get the integer part of a heap number. Surprisingly, all this bit twiddling
7838// is faster than using the built-in instructions on floating point registers.
7839// Trashes rdi and rbx. Dest is rcx. Source cannot be rcx or one of the
7840// trashed registers.
7841void IntegerConvert(MacroAssembler* masm,
7842 Register source,
7843 bool use_sse3,
7844 Label* conversion_failure) {
7845 ASSERT(!source.is(rcx) && !source.is(rdi) && !source.is(rbx));
7846 Label done, right_exponent, normal_exponent;
7847 Register scratch = rbx;
7848 Register scratch2 = rdi;
7849 // Get exponent word.
7850 __ movl(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
7851 // Get exponent alone in scratch2.
7852 __ movl(scratch2, scratch);
7853 __ and_(scratch2, Immediate(HeapNumber::kExponentMask));
7854 if (use_sse3) {
7855 CpuFeatures::Scope scope(SSE3);
7856 // Check whether the exponent is too big for a 64 bit signed integer.
7857 static const uint32_t kTooBigExponent =
7858 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
7859 __ cmpl(scratch2, Immediate(kTooBigExponent));
7860 __ j(greater_equal, conversion_failure);
7861 // Load x87 register with heap number.
7862 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
7863 // Reserve space for 64 bit answer.
7864 __ subq(rsp, Immediate(sizeof(uint64_t))); // Nolint.
7865 // Do conversion, which cannot fail because we checked the exponent.
7866 __ fisttp_d(Operand(rsp, 0));
7867 __ movl(rcx, Operand(rsp, 0)); // Load low word of answer into rcx.
7868 __ addq(rsp, Immediate(sizeof(uint64_t))); // Nolint.
7869 } else {
7870 // Load rcx with zero. We use this either for the final shift or
7871 // for the answer.
7872 __ xor_(rcx, rcx);
7873 // Check whether the exponent matches a 32 bit signed int that cannot be
7874 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
7875 // exponent is 30 (biased). This is the exponent that we are fastest at and
7876 // also the highest exponent we can handle here.
7877 const uint32_t non_smi_exponent =
7878 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
7879 __ cmpl(scratch2, Immediate(non_smi_exponent));
7880 // If we have a match of the int32-but-not-Smi exponent then skip some
7881 // logic.
7882 __ j(equal, &right_exponent);
7883 // If the exponent is higher than that then go to slow case. This catches
7884 // numbers that don't fit in a signed int32, infinities and NaNs.
7885 __ j(less, &normal_exponent);
7886
7887 {
7888 // Handle a big exponent. The only reason we have this code is that the
7889 // >>> operator has a tendency to generate numbers with an exponent of 31.
7890 const uint32_t big_non_smi_exponent =
7891 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
7892 __ cmpl(scratch2, Immediate(big_non_smi_exponent));
7893 __ j(not_equal, conversion_failure);
7894 // We have the big exponent, typically from >>>. This means the number is
7895 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
7896 __ movl(scratch2, scratch);
7897 __ and_(scratch2, Immediate(HeapNumber::kMantissaMask));
7898 // Put back the implicit 1.
7899 __ or_(scratch2, Immediate(1 << HeapNumber::kExponentShift));
7900 // Shift up the mantissa bits to take up the space the exponent used to
7901 // take. We just orred in the implicit bit so that took care of one and
7902 // we want to use the full unsigned range so we subtract 1 bit from the
7903 // shift distance.
7904 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
7905 __ shl(scratch2, Immediate(big_shift_distance));
7906 // Get the second half of the double.
7907 __ movl(rcx, FieldOperand(source, HeapNumber::kMantissaOffset));
7908 // Shift down 21 bits to get the most significant 11 bits or the low
7909 // mantissa word.
7910 __ shr(rcx, Immediate(32 - big_shift_distance));
7911 __ or_(rcx, scratch2);
7912 // We have the answer in rcx, but we may need to negate it.
7913 __ testl(scratch, scratch);
7914 __ j(positive, &done);
7915 __ neg(rcx);
7916 __ jmp(&done);
7917 }
7918
7919 __ bind(&normal_exponent);
7920 // Exponent word in scratch, exponent part of exponent word in scratch2.
7921 // Zero in rcx.
7922 // We know the exponent is smaller than 30 (biased). If it is less than
7923 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
7924 // it rounds to zero.
7925 const uint32_t zero_exponent =
7926 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
7927 __ subl(scratch2, Immediate(zero_exponent));
7928 // rcx already has a Smi zero.
7929 __ j(less, &done);
7930
7931 // We have a shifted exponent between 0 and 30 in scratch2.
7932 __ shr(scratch2, Immediate(HeapNumber::kExponentShift));
7933 __ movl(rcx, Immediate(30));
7934 __ subl(rcx, scratch2);
7935
7936 __ bind(&right_exponent);
7937 // Here rcx is the shift, scratch is the exponent word.
7938 // Get the top bits of the mantissa.
7939 __ and_(scratch, Immediate(HeapNumber::kMantissaMask));
7940 // Put back the implicit 1.
7941 __ or_(scratch, Immediate(1 << HeapNumber::kExponentShift));
7942 // Shift up the mantissa bits to take up the space the exponent used to
7943 // take. We have kExponentShift + 1 significant bits in the low end of the
7944 // word. Shift them to the top bits.
7945 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
7946 __ shl(scratch, Immediate(shift_distance));
7947 // Get the second half of the double. For some exponents we don't
7948 // actually need this because the bits get shifted out again, but
7949 // it's probably slower to test than just to do it.
7950 __ movl(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
7951 // Shift down 22 bits to get the most significant 10 bits or the low
7952 // mantissa word.
7953 __ shr(scratch2, Immediate(32 - shift_distance));
7954 __ or_(scratch2, scratch);
7955 // Move down according to the exponent.
7956 __ shr_cl(scratch2);
7957 // Now the unsigned answer is in scratch2. We need to move it to rcx and
7958 // we may need to fix the sign.
7959 Label negative;
7960 __ xor_(rcx, rcx);
7961 __ cmpl(rcx, FieldOperand(source, HeapNumber::kExponentOffset));
7962 __ j(greater, &negative);
7963 __ movl(rcx, scratch2);
7964 __ jmp(&done);
7965 __ bind(&negative);
7966 __ subl(rcx, scratch2);
7967 __ bind(&done);
7968 }
7969}
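// Illustrative sketch (not part of V8): what the non-SSE3 path above computes
// for a heap number whose unbiased exponent is between 0 and 30 (the special
// exponent-31 case and the SSE3 path are left out). Names are local to this
// sketch.
static inline int32_t TruncateSmallDoubleToInt32(uint64_t double_bits) {
  uint32_t hi = static_cast<uint32_t>(double_bits >> 32);  // Sign, exponent, mantissa top.
  uint32_t lo = static_cast<uint32_t>(double_bits);        // Mantissa low word.
  int exponent = static_cast<int>((hi >> 20) & 0x7FF) - HeapNumber::kExponentBias;
  if (exponent < 0) return 0;  // Magnitude below 1.0 rounds to zero.
  // 53-bit mantissa with the implicit leading 1 put back in.
  uint64_t mantissa =
      ((static_cast<uint64_t>(hi & 0xFFFFF) | 0x100000) << 32) | lo;
  uint32_t magnitude = static_cast<uint32_t>(mantissa >> (52 - exponent));
  return (hi & 0x80000000u) ? -static_cast<int32_t>(magnitude)
                            : static_cast<int32_t>(magnitude);
}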
7970
7971
Leon Clarkee46be812010-01-19 14:06:41 +00007972void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
Leon Clarked91b9f72010-01-27 17:25:45 +00007973 Label slow, done;
Leon Clarkee46be812010-01-19 14:06:41 +00007974
Leon Clarked91b9f72010-01-27 17:25:45 +00007975 if (op_ == Token::SUB) {
7976 // Check whether the value is a smi.
7977 Label try_float;
7978 __ JumpIfNotSmi(rax, &try_float);
Steve Blocka7e24c12009-10-30 11:49:00 +00007979
Leon Clarked91b9f72010-01-27 17:25:45 +00007980 // Enter runtime system if the value of the smi is zero
7981 // to make sure that we switch between 0 and -0.
7982 // Also enter it if the value of the smi is Smi::kMinValue.
7983 __ SmiNeg(rax, rax, &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00007984
Leon Clarked91b9f72010-01-27 17:25:45 +00007985 // Either zero or Smi::kMinValue, neither of which become a smi when
7986 // negated.
7987 __ SmiCompare(rax, Smi::FromInt(0));
7988 __ j(not_equal, &slow);
7989 __ Move(rax, Factory::minus_zero_value());
7990 __ jmp(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00007991
Leon Clarked91b9f72010-01-27 17:25:45 +00007992 // Try floating point case.
7993 __ bind(&try_float);
7994 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
7995 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
7996 __ j(not_equal, &slow);
7997 // Operand is a float, negate its value by flipping sign bit.
7998 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
7999 __ movq(kScratchRegister, Immediate(0x01));
8000 __ shl(kScratchRegister, Immediate(63));
8001 __ xor_(rdx, kScratchRegister); // Flip sign.
8002 // rdx is value to store.
8003 if (overwrite_) {
8004 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx);
8005 } else {
8006 __ AllocateHeapNumber(rcx, rbx, &slow);
8007 // rcx: allocated 'empty' number
8008 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
8009 __ movq(rax, rcx);
8010 }
8011 } else if (op_ == Token::BIT_NOT) {
8012 // Check if the operand is a heap number.
8013 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
8014 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
8015 __ j(not_equal, &slow);
8016
8017 // Convert the heap number in rax to an untagged integer in rcx.
8018 IntegerConvert(masm, rax, CpuFeatures::IsSupported(SSE3), &slow);
8019
8020 // Do the bitwise operation and check if the result fits in a smi.
8021 Label try_float;
8022 __ not_(rcx);
8023 // Tag the result as a smi and we're done.
8024 ASSERT(kSmiTagSize == 1);
8025 __ Integer32ToSmi(rax, rcx);
8026 }
8027
8028 // Return from the stub.
8029 __ bind(&done);
8030 __ StubReturn(1);
8031
8032 // Handle the slow case by jumping to the JavaScript builtin.
Steve Blocka7e24c12009-10-30 11:49:00 +00008033 __ bind(&slow);
8034 __ pop(rcx); // pop return address
8035 __ push(rax);
8036 __ push(rcx); // push return address
Leon Clarked91b9f72010-01-27 17:25:45 +00008037 switch (op_) {
8038 case Token::SUB:
8039 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
8040 break;
8041 case Token::BIT_NOT:
8042 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
8043 break;
8044 default:
8045 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00008046 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008047}
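// Illustrative sketch (not part of V8): the Token::SUB heap-number path above
// negates the value by flipping the IEEE 754 sign bit of its 64-bit
// representation, which also works for zeros and NaNs:
static inline uint64_t FlipDoubleSignBit(uint64_t double_bits) {
  return double_bits ^ (static_cast<uint64_t>(1) << 63);
}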
8048
8049
Leon Clarke4515c472010-02-03 11:58:03 +00008050void RegExpExecStub::Generate(MacroAssembler* masm) {
8051 // Just jump directly to runtime if native RegExp is not selected at compile
8052 // time or if regexp entry in generated code is turned off by a runtime
8053 // switch or at compilation.
Steve Block6ded16b2010-05-10 14:33:55 +01008054#ifdef V8_INTERPRETED_REGEXP
8055 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
8056#else // V8_INTERPRETED_REGEXP
Leon Clarke4515c472010-02-03 11:58:03 +00008057 if (!FLAG_regexp_entry_native) {
Steve Block6ded16b2010-05-10 14:33:55 +01008058 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
Leon Clarke4515c472010-02-03 11:58:03 +00008059 return;
8060 }
8061
8062 // Stack frame on entry.
8063 // rsp[0]: return address
8064 // rsp[8]: last_match_info (expected JSArray)
8065 // rsp[16]: previous index
8066 // rsp[24]: subject string
8067 // rsp[32]: JSRegExp object
8068
8069 static const int kLastMatchInfoOffset = 1 * kPointerSize;
8070 static const int kPreviousIndexOffset = 2 * kPointerSize;
8071 static const int kSubjectOffset = 3 * kPointerSize;
8072 static const int kJSRegExpOffset = 4 * kPointerSize;
8073
8074 Label runtime;
8075
8076 // Ensure that a RegExp stack is allocated.
8077 ExternalReference address_of_regexp_stack_memory_address =
8078 ExternalReference::address_of_regexp_stack_memory_address();
8079 ExternalReference address_of_regexp_stack_memory_size =
8080 ExternalReference::address_of_regexp_stack_memory_size();
8081 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
8082 __ movq(kScratchRegister, Operand(kScratchRegister, 0));
8083 __ testq(kScratchRegister, kScratchRegister);
8084 __ j(zero, &runtime);
8085
8086
8087 // Check that the first argument is a JSRegExp object.
8088 __ movq(rax, Operand(rsp, kJSRegExpOffset));
8089 __ JumpIfSmi(rax, &runtime);
8090 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
8091 __ j(not_equal, &runtime);
8092 // Check that the RegExp has been compiled (data contains a fixed array).
8093 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
8094 if (FLAG_debug_code) {
8095 Condition is_smi = masm->CheckSmi(rcx);
8096 __ Check(NegateCondition(is_smi),
8097 "Unexpected type for RegExp data, FixedArray expected");
8098 __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister);
8099 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
8100 }
8101
8102 // rcx: RegExp data (FixedArray)
8103 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
8104 __ movq(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset));
8105 __ SmiCompare(rbx, Smi::FromInt(JSRegExp::IRREGEXP));
8106 __ j(not_equal, &runtime);
8107
8108 // rcx: RegExp data (FixedArray)
8109 // Check that the number of captures fit in the static offsets vector buffer.
8110 __ movq(rdx, FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
8111 // Calculate number of capture registers (number_of_captures + 1) * 2.
8112 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rdx, 1);
8113 __ addq(rdx, Immediate(2)); // rdx was number_of_captures * 2.
8114 // Check that the static offsets vector buffer is large enough.
8115 __ cmpq(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
8116 __ j(above, &runtime);
8117
8118 // rcx: RegExp data (FixedArray)
8119 // rdx: Number of capture registers
8120 // Check that the second argument is a string.
8121 __ movq(rax, Operand(rsp, kSubjectOffset));
8122 __ JumpIfSmi(rax, &runtime);
8123 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
8124 __ j(NegateCondition(is_string), &runtime);
8125 // Get the length of the string to rbx.
Steve Block6ded16b2010-05-10 14:33:55 +01008126 __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00008127
Steve Block6ded16b2010-05-10 14:33:55 +01008128 // rbx: Length of subject string as smi
Leon Clarke4515c472010-02-03 11:58:03 +00008129 // rcx: RegExp data (FixedArray)
8130 // rdx: Number of capture registers
8131 // Check that the third argument is a positive smi less than the string
Steve Block6ded16b2010-05-10 14:33:55 +01008132 // length. A negative value will be greater (unsigned comparison).
Leon Clarke4515c472010-02-03 11:58:03 +00008133 __ movq(rax, Operand(rsp, kPreviousIndexOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01008134 __ JumpIfNotSmi(rax, &runtime);
8135 __ SmiCompare(rax, rbx);
8136 __ j(above_equal, &runtime);
Leon Clarke4515c472010-02-03 11:58:03 +00008137
8138 // rcx: RegExp data (FixedArray)
8139 // rdx: Number of capture registers
8140 // Check that the fourth object is a JSArray object.
8141 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
8142 __ JumpIfSmi(rax, &runtime);
8143 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister);
8144 __ j(not_equal, &runtime);
8145 // Check that the JSArray is in fast case.
8146 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
8147 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
8148 __ Cmp(rax, Factory::fixed_array_map());
8149 __ j(not_equal, &runtime);
8150 // Check that the last match info has space for the capture registers and the
8151 // additional information. Ensure no overflow in add.
8152 ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
8153 __ movl(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
8154 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
8155 __ cmpl(rdx, rax);
8156 __ j(greater, &runtime);
8157
8158 // rcx: RegExp data (FixedArray)
8159 // Check the representation and encoding of the subject string.
8160 Label seq_string, seq_two_byte_string, check_code;
8161 const int kStringRepresentationEncodingMask =
8162 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
8163 __ movq(rax, Operand(rsp, kSubjectOffset));
8164 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
8165 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
8166 __ andb(rbx, Immediate(kStringRepresentationEncodingMask));
8167 // First check for sequential string.
8168 ASSERT_EQ(0, kStringTag);
8169 ASSERT_EQ(0, kSeqStringTag);
8170 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
8171 __ j(zero, &seq_string);
8172
8173 // Check for flat cons string.
8174 // A flat cons string is a cons string where the second part is the empty
8175 // string. In that case the subject string is just the first part of the cons
8176 // string. Also in this case the first part of the cons string is known to be
8177 // a sequential string or an external string.
Steve Block6ded16b2010-05-10 14:33:55 +01008178 __ andb(rbx, Immediate(kStringRepresentationMask));
8179 __ cmpb(rbx, Immediate(kConsStringTag));
Leon Clarke4515c472010-02-03 11:58:03 +00008180 __ j(not_equal, &runtime);
8181 __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset));
8182 __ Cmp(rdx, Factory::empty_string());
8183 __ j(not_equal, &runtime);
8184 __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset));
8185 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
8186 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
8187 ASSERT_EQ(0, kSeqStringTag);
8188 __ testb(rbx, Immediate(kStringRepresentationMask));
8189 __ j(not_zero, &runtime);
8190 __ andb(rbx, Immediate(kStringRepresentationEncodingMask));
8191
8192 __ bind(&seq_string);
8193 // rax: subject string (sequential, either ascii or two byte)
8194 // rbx: subject string type & kStringRepresentationEncodingMask
8195 // rcx: RegExp data (FixedArray)
8196 // Check that the irregexp code has been generated for an ascii string. If
8197 // it has, the field contains a code object otherwise it contains the hole.
Steve Block6ded16b2010-05-10 14:33:55 +01008198 const int kSeqTwoByteString = kStringTag | kSeqStringTag | kTwoByteStringTag;
8199 __ cmpb(rbx, Immediate(kSeqTwoByteString));
Leon Clarke4515c472010-02-03 11:58:03 +00008200 __ j(equal, &seq_two_byte_string);
8201 if (FLAG_debug_code) {
8202 __ cmpb(rbx, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
8203 __ Check(equal, "Expected sequential ascii string");
8204 }
8205 __ movq(r12, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset));
8206 __ Set(rdi, 1); // Type is ascii.
8207 __ jmp(&check_code);
8208
8209 __ bind(&seq_two_byte_string);
8210 // rax: subject string
8211 // rcx: RegExp data (FixedArray)
8212 __ movq(r12, FieldOperand(rcx, JSRegExp::kDataUC16CodeOffset));
8213 __ Set(rdi, 0); // Type is two byte.
8214
8215 __ bind(&check_code);
8216 // Check that the irregexp code has been generated for the actual string
8217 // encoding. If it has, the field contains a code object otherwise it contains
8218 // the hole.
8219 __ CmpObjectType(r12, CODE_TYPE, kScratchRegister);
8220 __ j(not_equal, &runtime);
8221
8222 // rax: subject string
8223 // rdi: encoding of subject string (1 if ascii, 0 if two_byte);
8224 // r12: code
8225 // Load used arguments before starting to push arguments for call to native
8226 // RegExp code to avoid handling changing stack height.
8227 __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
8228 __ SmiToInteger64(rbx, rbx); // Previous index from smi.
8229
8230 // rax: subject string
8231 // rbx: previous index
8232 // rdi: encoding of subject string (1 if ascii 0 if two_byte);
8233 // r12: code
8234 // All checks done. Now push arguments for native regexp code.
8235 __ IncrementCounter(&Counters::regexp_entry_native, 1);
8236
8237 // rsi is caller save on Windows and used to pass parameter on Linux.
8238 __ push(rsi);
8239
8240 static const int kRegExpExecuteArguments = 7;
8241 __ PrepareCallCFunction(kRegExpExecuteArguments);
8242 int argument_slots_on_stack =
8243 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
8244
8245 // Argument 7: Indicate that this is a direct call from JavaScript.
8246 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
8247 Immediate(1));
8248
8249 // Argument 6: Start (high end) of backtracking stack memory area.
8250 __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
8251 __ movq(r9, Operand(kScratchRegister, 0));
8252 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
8253 __ addq(r9, Operand(kScratchRegister, 0));
8254 // Argument 6 passed in r9 on Linux and on the stack on Windows.
8255#ifdef _WIN64
8256 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9);
8257#endif
8258
8259 // Argument 5: static offsets vector buffer.
8260 __ movq(r8, ExternalReference::address_of_static_offsets_vector());
8261 // Argument 5 passed in r8 on Linux and on the stack on Windows.
8262#ifdef _WIN64
8263 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8);
8264#endif
8265
8266 // First four arguments are passed in registers on both Linux and Windows.
8267#ifdef _WIN64
8268 Register arg4 = r9;
8269 Register arg3 = r8;
8270 Register arg2 = rdx;
8271 Register arg1 = rcx;
8272#else
8273 Register arg4 = rcx;
8274 Register arg3 = rdx;
8275 Register arg2 = rsi;
8276 Register arg1 = rdi;
8277#endif
8278
8279 // Keep track of aliasing between argX defined above and the registers used.
8280 // rax: subject string
8281 // rbx: previous index
8282 // rdi: encoding of subject string (1 if ascii 0 if two_byte);
8283 // r12: code
8284
8285 // Argument 4: End of string data
8286 // Argument 3: Start of string data
8287 Label setup_two_byte, setup_rest;
8288 __ testb(rdi, rdi);
Steve Block6ded16b2010-05-10 14:33:55 +01008289 __ movq(rdi, FieldOperand(rax, String::kLengthOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00008290 __ j(zero, &setup_two_byte);
Steve Block6ded16b2010-05-10 14:33:55 +01008291 __ SmiToInteger32(rdi, rdi);
Leon Clarke4515c472010-02-03 11:58:03 +00008292 __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize));
8293 __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize));
8294 __ jmp(&setup_rest);
8295 __ bind(&setup_two_byte);
Steve Block6ded16b2010-05-10 14:33:55 +01008296 __ SmiToInteger32(rdi, rdi);
Leon Clarke4515c472010-02-03 11:58:03 +00008297 __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize));
8298 __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize));
8299
8300 __ bind(&setup_rest);
8301 // Argument 2: Previous index.
8302 __ movq(arg2, rbx);
8303
8304 // Argument 1: Subject string.
8305 __ movq(arg1, rax);
8306
8307 // Locate the code entry and call it.
8308 __ addq(r12, Immediate(Code::kHeaderSize - kHeapObjectTag));
8309 __ CallCFunction(r12, kRegExpExecuteArguments);
8310
8311 // rsi is caller save, as it is used to pass parameter.
8312 __ pop(rsi);
8313
8314 // Check the result.
8315 Label success;
8316 __ cmpq(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
8317 __ j(equal, &success);
8318 Label failure;
8319 __ cmpq(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
8320 __ j(equal, &failure);
8321 __ cmpq(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
8322 // If not exception it can only be retry. Handle that in the runtime system.
8323 __ j(not_equal, &runtime);
8324 // Result must now be exception. If there is no pending exception already, a
8325 // stack overflow (on the backtrack stack) was detected in RegExp code, but
8326 // the exception has not been created yet. Handle that in the runtime system.
Steve Block6ded16b2010-05-10 14:33:55 +01008327 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Leon Clarke4515c472010-02-03 11:58:03 +00008328 ExternalReference pending_exception_address(Top::k_pending_exception_address);
8329 __ movq(kScratchRegister, pending_exception_address);
8330 __ Cmp(kScratchRegister, Factory::the_hole_value());
8331 __ j(equal, &runtime);
8332 __ bind(&failure);
8333 // For failure and exception return null.
8334 __ Move(rax, Factory::null_value());
8335 __ ret(4 * kPointerSize);
8336
8337 // Load RegExp data.
8338 __ bind(&success);
8339 __ movq(rax, Operand(rsp, kJSRegExpOffset));
8340 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
8341 __ movq(rdx, FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
8342 // Calculate number of capture registers (number_of_captures + 1) * 2.
8343 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rdx, 1);
8344 __ addq(rdx, Immediate(2)); // rdx was number_of_captures * 2.
8345
8346 // rdx: Number of capture registers
8347 // Load last_match_info which is still known to be a fast case JSArray.
8348 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
8349 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
8350
8351 // rbx: last_match_info backing store (FixedArray)
8352 // rdx: number of capture registers
8353 // Store the capture count.
8354 __ Integer32ToSmi(kScratchRegister, rdx);
8355 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
8356 kScratchRegister);
8357 // Store last subject and last input.
8358 __ movq(rax, Operand(rsp, kSubjectOffset));
8359 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
8360 __ movq(rcx, rbx);
8361 __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi);
8362 __ movq(rax, Operand(rsp, kSubjectOffset));
8363 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
8364 __ movq(rcx, rbx);
8365 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);
8366
8367 // Get the static offsets vector filled by the native regexp code.
8368 __ movq(rcx, ExternalReference::address_of_static_offsets_vector());
8369
8370 // rbx: last_match_info backing store (FixedArray)
8371 // rcx: offsets vector
8372 // rdx: number of capture registers
8373 Label next_capture, done;
Leon Clarke4515c472010-02-03 11:58:03 +00008374 // Capture register counter starts from number of capture registers and
8375 // counts down until wrapping after zero.
8376 __ bind(&next_capture);
8377 __ subq(rdx, Immediate(1));
8378 __ j(negative, &done);
8379 // Read the value from the static offsets vector buffer and make it a smi.
8380 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
8381 __ Integer32ToSmi(rdi, rdi, &runtime);
Leon Clarke4515c472010-02-03 11:58:03 +00008382 // Store the smi value in the last match info.
8383 __ movq(FieldOperand(rbx,
8384 rdx,
8385 times_pointer_size,
8386 RegExpImpl::kFirstCaptureOffset),
8387 rdi);
8388 __ jmp(&next_capture);
8389 __ bind(&done);
8390
8391 // Return last match info.
8392 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
8393 __ ret(4 * kPointerSize);
8394
8395 // Do the runtime call to execute the regexp.
8396 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +01008397 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
8398#endif // V8_INTERPRETED_REGEXP
8399}
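// Illustrative sketch (not part of V8): the capture-register count computed
// twice above from the JSRegExp data and checked against the static offsets
// vector size.
static inline int CaptureRegisterCount(int number_of_captures) {
  // Two registers (start and end offset) per capture plus the whole match.
  return (number_of_captures + 1) * 2;
}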
8400
8401
8402void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
8403 Register hash,
8404 Register mask) {
8405 __ and_(hash, mask);
8406 // Each entry in string cache consists of two pointer sized fields,
8407 // but times_twice_pointer_size (multiplication by 16) scale factor
8408 // is not supported by addrmode on x64 platform.
8409 // So we have to premultiply entry index before lookup.
8410 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
8411}
8412
8413
8414void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
8415 Register object,
8416 Register result,
8417 Register scratch1,
8418 Register scratch2,
8419 bool object_is_smi,
8420 Label* not_found) {
8421 // Use of registers. Register result is used as a temporary.
8422 Register number_string_cache = result;
8423 Register mask = scratch1;
8424 Register scratch = scratch2;
8425
8426 // Load the number string cache.
8427 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
8428
8429 // Make the hash mask from the length of the number string cache. It
8430 // contains two elements (number and string) for each cache entry.
8431 __ movl(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
8432 __ shrl(mask, Immediate(1)); // Divide length by two (length is not a smi).
8433 __ subl(mask, Immediate(1)); // Make mask.
8434
8435 // Calculate the entry in the number string cache. The hash value in the
8436 // number string cache for smis is just the smi value, and the hash for
8437 // doubles is the xor of the upper and lower words. See
8438 // Heap::GetNumberStringCache.
8439 Label is_smi;
8440 Label load_result_from_cache;
8441 if (!object_is_smi) {
8442 __ JumpIfSmi(object, &is_smi);
8443 __ CheckMap(object, Factory::heap_number_map(), not_found, true);
8444
8445 ASSERT_EQ(8, kDoubleSize);
8446 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
8447 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
8448 GenerateConvertHashCodeToIndex(masm, scratch, mask);
8449
8450 Register index = scratch;
8451 Register probe = mask;
8452 __ movq(probe,
8453 FieldOperand(number_string_cache,
8454 index,
8455 times_1,
8456 FixedArray::kHeaderSize));
8457 __ JumpIfSmi(probe, not_found);
8458 ASSERT(CpuFeatures::IsSupported(SSE2));
8459 CpuFeatures::Scope fscope(SSE2);
8460 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
8461 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
8462 __ comisd(xmm0, xmm1);
8463 __ j(parity_even, not_found); // Bail out if NaN is involved.
8464 __ j(not_equal, not_found); // The cache did not contain this value.
8465 __ jmp(&load_result_from_cache);
8466 }
8467
8468 __ bind(&is_smi);
8469 __ movq(scratch, object);
8470 __ SmiToInteger32(scratch, scratch);
8471 GenerateConvertHashCodeToIndex(masm, scratch, mask);
8472
8473 Register index = scratch;
8474 // Check if the entry is the smi we are looking for.
8475 __ cmpq(object,
8476 FieldOperand(number_string_cache,
8477 index,
8478 times_1,
8479 FixedArray::kHeaderSize));
8480 __ j(not_equal, not_found);
8481
8482 // Get the result from the cache.
8483 __ bind(&load_result_from_cache);
8484 __ movq(result,
8485 FieldOperand(number_string_cache,
8486 index,
8487 times_1,
8488 FixedArray::kHeaderSize + kPointerSize));
8489 __ IncrementCounter(&Counters::number_to_string_native, 1);
8490}
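// Illustrative sketch (not part of V8): the probe index computed above. For a
// smi the hash is its untagged value; for a heap number it is the xor of the
// two 32-bit halves of the double. Each cache entry holds a (number, string)
// pair, so the element index of the key slot is the masked hash times two
// (the generated code scales this straight into a byte offset instead).
static inline int NumberStringCacheKeyIndex(uint32_t hash, int cache_length) {
  int mask = (cache_length / 2) - 1;  // cache_length counts both slots of each entry.
  return static_cast<int>(hash & static_cast<uint32_t>(mask)) * 2;
}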
8491
8492
8493void NumberToStringStub::Generate(MacroAssembler* masm) {
8494 Label runtime;
8495
8496 __ movq(rbx, Operand(rsp, kPointerSize));
8497
8498 // Generate code to lookup number in the number string cache.
8499 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime);
8500 __ ret(1 * kPointerSize);
8501
8502 __ bind(&runtime);
8503 // Handle number to string in the runtime system if not found in the cache.
8504 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
8505}
8506
8507
8508void RecordWriteStub::Generate(MacroAssembler* masm) {
8509 masm->RecordWriteHelper(object_, addr_, scratch_);
8510 masm->ret(0);
8511}
8512
8513
8514static int NegativeComparisonResult(Condition cc) {
8515 ASSERT(cc != equal);
8516 ASSERT((cc == less) || (cc == less_equal)
8517 || (cc == greater) || (cc == greater_equal));
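  // Return a comparison result for which the condition cc does not hold:
  // LESS for greater/greater_equal, GREATER for less/less_equal.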
8518 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
Leon Clarke4515c472010-02-03 11:58:03 +00008519}
8520
8521
Steve Blocka7e24c12009-10-30 11:49:00 +00008522void CompareStub::Generate(MacroAssembler* masm) {
8523 Label call_builtin, done;
Steve Block6ded16b2010-05-10 14:33:55 +01008524 // The compare stub returns a positive, negative, or zero 64-bit integer
8525  // value in rax, corresponding to the result of comparing the two inputs.
Steve Blocka7e24c12009-10-30 11:49:00 +00008526 // NOTICE! This code is only reached after a smi-fast-case check, so
8527 // it is certain that at least one operand isn't a smi.
8528
Steve Block6ded16b2010-05-10 14:33:55 +01008529 // Two identical objects are equal unless they are both NaN or undefined.
8530 {
8531 Label not_identical;
8532 __ cmpq(rax, rdx);
8533 __ j(not_equal, &not_identical);
Steve Blocka7e24c12009-10-30 11:49:00 +00008534
Steve Block6ded16b2010-05-10 14:33:55 +01008535 if (cc_ != equal) {
8536 // Check for undefined. undefined OP undefined is false even though
8537 // undefined == undefined.
8538 Label check_for_nan;
8539 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
8540 __ j(not_equal, &check_for_nan);
8541 __ Set(rax, NegativeComparisonResult(cc_));
8542 __ ret(0);
8543 __ bind(&check_for_nan);
8544 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008545
Steve Block6ded16b2010-05-10 14:33:55 +01008546 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
8547  // so we do the second-best thing: test it ourselves.
8548 // Note: if cc_ != equal, never_nan_nan_ is not used.
8549 if (never_nan_nan_ && (cc_ == equal)) {
8550 __ Set(rax, EQUAL);
8551 __ ret(0);
8552 } else {
8553 Label return_equal;
8554 Label heap_number;
8555 // If it's not a heap number, then return equal.
8556 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
8557 Factory::heap_number_map());
8558 __ j(equal, &heap_number);
8559 __ bind(&return_equal);
8560 __ Set(rax, EQUAL);
8561 __ ret(0);
Steve Blockd0582a62009-12-15 09:54:21 +00008562
Steve Block6ded16b2010-05-10 14:33:55 +01008563 __ bind(&heap_number);
8564 // It is a heap number, so return non-equal if it's NaN and equal if
8565 // it's not NaN.
8566 // The representation of NaN values has all exponent bits (52..62) set,
8567 // and not all mantissa bits (0..51) clear.
8568 // We only allow QNaNs, which have bit 51 set (which also rules out
8569 // the value being Infinity).
8570
8571 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
8572 // all bits in the mask are set. We only need to check the word
8573 // that contains the exponent and high bit of the mantissa.
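      // Illustration (assuming the standard IEEE 754 layout): the canonical
      // quiet NaN 0x7FF8000000000000 has high word 0x7FF80000; after shifting
      // out the sign bit below, that value compares above_equal to
      // (kQuietNaNHighBitsMask << 1).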
8574 ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u);
8575 __ movl(rdx, FieldOperand(rdx, HeapNumber::kExponentOffset));
8576 __ xorl(rax, rax);
8577 __ addl(rdx, rdx); // Shift value and mask so mask applies to top bits.
8578 __ cmpl(rdx, Immediate(kQuietNaNHighBitsMask << 1));
8579 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00008580 __ setcc(above_equal, rax);
8581 __ ret(0);
Steve Block6ded16b2010-05-10 14:33:55 +01008582 } else {
8583 Label nan;
8584 __ j(above_equal, &nan);
8585 __ Set(rax, EQUAL);
8586 __ ret(0);
8587 __ bind(&nan);
8588 __ Set(rax, NegativeComparisonResult(cc_));
8589 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +00008590 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008591 }
8592
Steve Block6ded16b2010-05-10 14:33:55 +01008593 __ bind(&not_identical);
8594 }
8595
8596 if (cc_ == equal) { // Both strict and non-strict.
8597 Label slow; // Fallthrough label.
8598
Steve Blocka7e24c12009-10-30 11:49:00 +00008599 // If we're doing a strict equality comparison, we don't have to do
8600 // type conversion, so we generate code to do fast comparison for objects
8601 // and oddballs. Non-smi numbers and strings still go through the usual
8602 // slow-case code.
8603 if (strict_) {
8604 // If either is a Smi (we know that not both are), then they can only
8605 // be equal if the other is a HeapNumber. If so, use the slow case.
8606 {
8607 Label not_smis;
8608 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
8609
8610 // Check if the non-smi operand is a heap number.
8611 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
8612 Factory::heap_number_map());
8613 // If heap number, handle it in the slow case.
8614 __ j(equal, &slow);
8615 // Return non-equal. ebx (the lower half of rbx) is not zero.
8616 __ movq(rax, rbx);
8617 __ ret(0);
8618
8619 __ bind(&not_smis);
8620 }
8621
8622 // If either operand is a JSObject or an oddball value, then they are not
8623  // equal since their pointers are different.
8624 // There is no test for undetectability in strict equality.
8625
8626 // If the first object is a JS object, we have done pointer comparison.
8627 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
8628 Label first_non_object;
8629 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
8630 __ j(below, &first_non_object);
8631  // Return non-zero. eax (the lower half of rax) is not zero.
8632 Label return_not_equal;
8633 ASSERT(kHeapObjectTag != 0);
8634 __ bind(&return_not_equal);
8635 __ ret(0);
8636
8637 __ bind(&first_non_object);
8638 // Check for oddballs: true, false, null, undefined.
8639 __ CmpInstanceType(rcx, ODDBALL_TYPE);
8640 __ j(equal, &return_not_equal);
8641
8642 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
8643 __ j(above_equal, &return_not_equal);
8644
8645 // Check for oddballs: true, false, null, undefined.
8646 __ CmpInstanceType(rcx, ODDBALL_TYPE);
8647 __ j(equal, &return_not_equal);
8648
8649 // Fall through to the general case.
8650 }
8651 __ bind(&slow);
8652 }
8653
8654 // Push arguments below the return address to prepare jump to builtin.
8655 __ pop(rcx);
8656 __ push(rax);
8657 __ push(rdx);
8658 __ push(rcx);
8659
Steve Block6ded16b2010-05-10 14:33:55 +01008660 // Generate the number comparison code.
8661 if (include_number_compare_) {
8662 Label non_number_comparison;
8663 Label unordered;
8664 FloatingPointHelper::LoadFloatOperand(masm, rdx, xmm0,
8665 &non_number_comparison);
8666 FloatingPointHelper::LoadFloatOperand(masm, rax, xmm1,
8667 &non_number_comparison);
Steve Blocka7e24c12009-10-30 11:49:00 +00008668
Steve Block6ded16b2010-05-10 14:33:55 +01008669 __ comisd(xmm0, xmm1);
Steve Blocka7e24c12009-10-30 11:49:00 +00008670
Steve Block6ded16b2010-05-10 14:33:55 +01008671 // Don't base result on EFLAGS when a NaN is involved.
8672 __ j(parity_even, &unordered);
8673 // Return a result of -1, 0, or 1, based on EFLAGS.
8674 __ movq(rax, Immediate(0)); // equal
8675 __ movq(rcx, Immediate(1));
8676 __ cmovq(above, rax, rcx);
8677 __ movq(rcx, Immediate(-1));
8678 __ cmovq(below, rax, rcx);
8679 __ ret(2 * kPointerSize); // rax, rdx were pushed
Steve Blocka7e24c12009-10-30 11:49:00 +00008680
Steve Block6ded16b2010-05-10 14:33:55 +01008681 // If one of the numbers was NaN, then the result is always false.
8682 // The cc is never not-equal.
8683 __ bind(&unordered);
8684 ASSERT(cc_ != not_equal);
8685 if (cc_ == less || cc_ == less_equal) {
8686 __ Set(rax, 1);
8687 } else {
8688 __ Set(rax, -1);
8689 }
8690 __ ret(2 * kPointerSize); // rax, rdx were pushed
Steve Blocka7e24c12009-10-30 11:49:00 +00008691
Steve Block6ded16b2010-05-10 14:33:55 +01008692 // The number comparison code did not provide a valid result.
8693 __ bind(&non_number_comparison);
8694 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008695
8696 // Fast negative check for symbol-to-symbol equality.
Leon Clarkee46be812010-01-19 14:06:41 +00008697 Label check_for_strings;
Steve Blocka7e24c12009-10-30 11:49:00 +00008698 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00008699 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
8700 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00008701
8702 // We've already checked for object identity, so if both operands
8703 // are symbols they aren't equal. Register eax (not rax) already holds a
8704 // non-zero value, which indicates not equal, so just return.
8705 __ ret(2 * kPointerSize);
8706 }
8707
Leon Clarkee46be812010-01-19 14:06:41 +00008708 __ bind(&check_for_strings);
8709
8710 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &call_builtin);
8711
8712  // Inline comparison of ASCII strings.
8713 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
8714 rdx,
8715 rax,
8716 rcx,
8717 rbx,
8718 rdi,
8719 r8);
8720
8721#ifdef DEBUG
8722 __ Abort("Unexpected fall-through from string comparison");
8723#endif
8724
Steve Blocka7e24c12009-10-30 11:49:00 +00008725 __ bind(&call_builtin);
8726  // Must swap the argument order for the builtin call.
8727 __ pop(rcx);
8728 __ pop(rdx);
8729 __ pop(rax);
8730 __ push(rdx);
8731 __ push(rax);
8732
8733  // Figure out which native to call and set up the arguments.
8734 Builtins::JavaScript builtin;
8735 if (cc_ == equal) {
8736 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
8737 } else {
8738 builtin = Builtins::COMPARE;
Steve Block6ded16b2010-05-10 14:33:55 +01008739 __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
Steve Blocka7e24c12009-10-30 11:49:00 +00008740 }
8741
8742 // Restore return address on the stack.
8743 __ push(rcx);
8744
8745 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
8746 // tagged as a small integer.
8747 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
8748}
8749
8750
8751void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
8752 Label* label,
8753 Register object,
8754 Register scratch) {
8755 __ JumpIfSmi(object, label);
8756 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
8757 __ movzxbq(scratch,
8758 FieldOperand(scratch, Map::kInstanceTypeOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008759 // Ensure that no non-strings have the symbol bit set.
8760 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
8761 ASSERT(kSymbolTag != 0);
8762 __ testb(scratch, Immediate(kIsSymbolMask));
8763 __ j(zero, label);
Steve Blocka7e24c12009-10-30 11:49:00 +00008764}
8765
8766
8767// Call the function just below TOS on the stack with the given
8768// arguments. The receiver is the TOS.
8769void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00008770 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00008771 int position) {
8772 // Push the arguments ("left-to-right") on the stack.
8773 int arg_count = args->length();
8774 for (int i = 0; i < arg_count; i++) {
8775 Load(args->at(i));
8776 }
8777
8778 // Record the position for debugging purposes.
8779 CodeForSourcePosition(position);
8780
8781 // Use the shared code stub to call the function.
8782 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00008783 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00008784 Result answer = frame_->CallStub(&call_function, arg_count + 1);
8785 // Restore context and replace function on the stack with the
8786 // result of the stub invocation.
8787 frame_->RestoreContextRegister();
8788 frame_->SetElementAt(0, &answer);
8789}
8790
8791
8792void InstanceofStub::Generate(MacroAssembler* masm) {
8793 // Implements "value instanceof function" operator.
8794 // Expected input state:
8795 // rsp[0] : return address
8796 // rsp[1] : function pointer
8797 // rsp[2] : value
8798
8799 // Get the object - go slow case if it's a smi.
8800 Label slow;
8801 __ movq(rax, Operand(rsp, 2 * kPointerSize));
8802 __ JumpIfSmi(rax, &slow);
8803
8804 // Check that the left hand is a JS object. Leave its map in rax.
8805 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
8806 __ j(below, &slow);
8807 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
8808 __ j(above, &slow);
8809
8810 // Get the prototype of the function.
8811 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
8812 __ TryGetFunctionPrototype(rdx, rbx, &slow);
8813
8814 // Check that the function prototype is a JS object.
8815 __ JumpIfSmi(rbx, &slow);
8816 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
8817 __ j(below, &slow);
8818 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
8819 __ j(above, &slow);
8820
8821 // Register mapping: rax is object map and rbx is function prototype.
8822 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
8823
8824 // Loop through the prototype chain looking for the function prototype.
8825 Label loop, is_instance, is_not_instance;
8826 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
8827 __ bind(&loop);
8828 __ cmpq(rcx, rbx);
8829 __ j(equal, &is_instance);
8830 __ cmpq(rcx, kScratchRegister);
8831 __ j(equal, &is_not_instance);
8832 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
8833 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
8834 __ jmp(&loop);
8835
8836 __ bind(&is_instance);
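  // A zero value in rax signals a successful instanceof check; the
  // not-an-instance path below returns 1 instead.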
Steve Blockd0582a62009-12-15 09:54:21 +00008837 __ xorl(rax, rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00008838 __ ret(2 * kPointerSize);
8839
8840 __ bind(&is_not_instance);
Steve Blockd0582a62009-12-15 09:54:21 +00008841 __ movl(rax, Immediate(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00008842 __ ret(2 * kPointerSize);
8843
8844 // Slow-case: Go through the JavaScript implementation.
8845 __ bind(&slow);
8846 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
8847}
8848
8849
8850void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
Andrei Popescu402d9372010-02-26 13:31:12 +00008851 // rsp[0] : return address
8852 // rsp[8] : number of parameters
8853 // rsp[16] : receiver displacement
8854 // rsp[24] : function
8855
Steve Blocka7e24c12009-10-30 11:49:00 +00008856 // The displacement is used for skipping the return address and the
8857 // frame pointer on the stack. It is the offset of the last
8858 // parameter (if any) relative to the frame pointer.
8859 static const int kDisplacement = 2 * kPointerSize;
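  // Assumed layout behind this constant: [fp] holds the saved frame pointer,
  // [fp + kPointerSize] the return address, and [fp + kDisplacement] the last
  // parameter (if any), for whichever frame pointer the offset is applied to.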
8860
8861 // Check if the calling frame is an arguments adaptor frame.
Andrei Popescu402d9372010-02-26 13:31:12 +00008862 Label adaptor_frame, try_allocate, runtime;
Steve Blocka7e24c12009-10-30 11:49:00 +00008863 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00008864 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
8865 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Andrei Popescu402d9372010-02-26 13:31:12 +00008866 __ j(equal, &adaptor_frame);
8867
8868 // Get the length from the frame.
8869 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
8870 __ jmp(&try_allocate);
Steve Blocka7e24c12009-10-30 11:49:00 +00008871
8872 // Patch the arguments.length and the parameters pointer.
Andrei Popescu402d9372010-02-26 13:31:12 +00008873 __ bind(&adaptor_frame);
Steve Blocka7e24c12009-10-30 11:49:00 +00008874 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
8875 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
Andrei Popescu402d9372010-02-26 13:31:12 +00008876 // Do not clobber the length index for the indexing operation since
8877  // it is used to compute the size for allocation later.
8878 SmiIndex index = masm->SmiToIndex(rbx, rcx, kPointerSizeLog2);
Steve Blocka7e24c12009-10-30 11:49:00 +00008879 __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement));
8880 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
8881
Andrei Popescu402d9372010-02-26 13:31:12 +00008882 // Try the new space allocation. Start out with computing the size of
8883 // the arguments object and the elements array.
8884 Label add_arguments_object;
8885 __ bind(&try_allocate);
8886 __ testq(rcx, rcx);
8887 __ j(zero, &add_arguments_object);
8888 index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
8889 __ lea(rcx, Operand(index.reg, index.scale, FixedArray::kHeaderSize));
8890 __ bind(&add_arguments_object);
8891 __ addq(rcx, Immediate(Heap::kArgumentsObjectSize));
8892
8893 // Do the allocation of both objects in one go.
8894 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
8895
8896 // Get the arguments boilerplate from the current (global) context.
8897 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
8898 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
8899 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
8900 __ movq(rdi, Operand(rdi, offset));
8901
8902 // Copy the JS object part.
8903 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
8904 __ movq(kScratchRegister, FieldOperand(rdi, i));
8905 __ movq(FieldOperand(rax, i), kScratchRegister);
8906 }
8907
8908  // Set up the callee in-object property.
8909 ASSERT(Heap::arguments_callee_index == 0);
8910 __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
8911 __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister);
8912
8913 // Get the length (smi tagged) and set that as an in-object property too.
8914 ASSERT(Heap::arguments_length_index == 1);
8915 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
8916 __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx);
8917
8918 // If there are no actual arguments, we're done.
8919 Label done;
8920 __ testq(rcx, rcx);
8921 __ j(zero, &done);
8922
8923 // Get the parameters pointer from the stack and untag the length.
8924 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
8925 __ SmiToInteger32(rcx, rcx);
8926
8927  // Set up the elements pointer in the allocated arguments object and
8928 // initialize the header in the elements fixed array.
8929 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
8930 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
8931 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
8932 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
8933 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
8934
8935 // Copy the fixed array slots.
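  // rdx walks down the stack over the parameters (skipping the receiver)
  // while rdi walks up through the FixedArray slots; rcx counts the
  // remaining arguments.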
8936 Label loop;
8937 __ bind(&loop);
8938 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
8939 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
8940 __ addq(rdi, Immediate(kPointerSize));
8941 __ subq(rdx, Immediate(kPointerSize));
8942 __ decq(rcx);
8943 __ j(not_zero, &loop);
8944
8945 // Return and remove the on-stack parameters.
8946 __ bind(&done);
8947 __ ret(3 * kPointerSize);
8948
Steve Blocka7e24c12009-10-30 11:49:00 +00008949 // Do the runtime call to allocate the arguments object.
8950 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +01008951 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00008952}
8953
8954
8955void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
8956 // The key is in rdx and the parameter count is in rax.
8957
8958 // The displacement is used for skipping the frame pointer on the
8959 // stack. It is the offset of the last parameter (if any) relative
8960 // to the frame pointer.
8961 static const int kDisplacement = 1 * kPointerSize;
8962
8963 // Check that the key is a smi.
8964 Label slow;
8965 __ JumpIfNotSmi(rdx, &slow);
8966
8967 // Check if the calling frame is an arguments adaptor frame.
8968 Label adaptor;
8969 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00008970 __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
8971 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00008972 __ j(equal, &adaptor);
8973
8974 // Check index against formal parameters count limit passed in
8975 // through register rax. Use unsigned comparison to get negative
8976 // check for free.
8977 __ cmpq(rdx, rax);
8978 __ j(above_equal, &slow);
8979
8980 // Read the argument from the stack and return it.
8981 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
8982 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
8983 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
8984 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
8985 __ Ret();
8986
8987 // Arguments adaptor case: Check index against actual arguments
8988 // limit found in the arguments adaptor frame. Use unsigned
8989 // comparison to get negative check for free.
8990 __ bind(&adaptor);
8991 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
8992 __ cmpq(rdx, rcx);
8993 __ j(above_equal, &slow);
8994
8995 // Read the argument from the stack and return it.
8996 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
8997 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
8998 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
8999 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
9000 __ Ret();
9001
9002 // Slow-case: Handle non-smi or out-of-bounds access to arguments
9003 // by calling the runtime system.
9004 __ bind(&slow);
9005 __ pop(rbx); // Return address.
9006 __ push(rdx);
9007 __ push(rbx);
Steve Block6ded16b2010-05-10 14:33:55 +01009008 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009009}
9010
9011
Steve Blocka7e24c12009-10-30 11:49:00 +00009012void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
9013  // Check that the stack contains the next handler, frame pointer, state and
9014  // return address, in that order.
9015 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize,
9016 StackHandlerConstants::kStateOffset);
9017 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize,
9018 StackHandlerConstants::kPCOffset);
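  // Together with the pops below, this encodes the assumed handler layout:
  //   [rsp + 0 * kPointerSize]  next handler in the chain
  //   [rsp + 1 * kPointerSize]  saved frame pointer
  //   [rsp + 2 * kPointerSize]  state
  //   [rsp + 3 * kPointerSize]  return address (pc)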
9019
9020 ExternalReference handler_address(Top::k_handler_address);
9021 __ movq(kScratchRegister, handler_address);
9022 __ movq(rsp, Operand(kScratchRegister, 0));
9023  // Get the next handler in the chain.
9024 __ pop(rcx);
9025 __ movq(Operand(kScratchRegister, 0), rcx);
9026 __ pop(rbp); // pop frame pointer
9027 __ pop(rdx); // remove state
9028
9029 // Before returning we restore the context from the frame pointer if not NULL.
9030 // The frame pointer is NULL in the exception handler of a JS entry frame.
9031 __ xor_(rsi, rsi); // tentatively set context pointer to NULL
9032 Label skip;
9033 __ cmpq(rbp, Immediate(0));
9034 __ j(equal, &skip);
9035 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
9036 __ bind(&skip);
9037 __ ret(0);
9038}
9039
9040
9041void CEntryStub::GenerateCore(MacroAssembler* masm,
9042 Label* throw_normal_exception,
9043 Label* throw_termination_exception,
9044 Label* throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009045 bool do_gc,
Steve Block6ded16b2010-05-10 14:33:55 +01009046 bool always_allocate_scope,
9047 int /* alignment_skew */) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009048 // rax: result parameter for PerformGC, if any.
9049 // rbx: pointer to C function (C callee-saved).
9050 // rbp: frame pointer (restored after C call).
9051 // rsp: stack pointer (restored after C call).
9052 // r14: number of arguments including receiver (C callee-saved).
9053 // r15: pointer to the first argument (C callee-saved).
9054 // This pointer is reused in LeaveExitFrame(), so it is stored in a
9055 // callee-saved register.
9056
Leon Clarke4515c472010-02-03 11:58:03 +00009057 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
9058  // Complex results must be written to the address passed as first argument.
9059 // AMD64 calling convention: a struct of two pointers in rax+rdx
9060
Steve Block6ded16b2010-05-10 14:33:55 +01009061 // Check stack alignment.
9062 if (FLAG_debug_code) {
9063 __ CheckStackAlignment();
9064 }
9065
Steve Blocka7e24c12009-10-30 11:49:00 +00009066 if (do_gc) {
Steve Block6ded16b2010-05-10 14:33:55 +01009067 // Pass failure code returned from last attempt as first argument to
9068 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
9069 // stack is known to be aligned. This function takes one argument which is
9070  // passed in a register.
Steve Blocka7e24c12009-10-30 11:49:00 +00009071#ifdef _WIN64
9072 __ movq(rcx, rax);
Steve Block6ded16b2010-05-10 14:33:55 +01009073#else // _WIN64
Steve Blocka7e24c12009-10-30 11:49:00 +00009074 __ movq(rdi, rax);
9075#endif
9076 __ movq(kScratchRegister,
9077 FUNCTION_ADDR(Runtime::PerformGC),
9078 RelocInfo::RUNTIME_ENTRY);
9079 __ call(kScratchRegister);
9080 }
9081
9082 ExternalReference scope_depth =
9083 ExternalReference::heap_always_allocate_scope_depth();
9084 if (always_allocate_scope) {
9085 __ movq(kScratchRegister, scope_depth);
9086 __ incl(Operand(kScratchRegister, 0));
9087 }
9088
9089 // Call C function.
9090#ifdef _WIN64
9091 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
9092 // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
9093 __ movq(Operand(rsp, 4 * kPointerSize), r14); // argc.
9094 __ movq(Operand(rsp, 5 * kPointerSize), r15); // argv.
9095 if (result_size_ < 2) {
9096 // Pass a pointer to the Arguments object as the first argument.
9097 // Return result in single register (rax).
9098 __ lea(rcx, Operand(rsp, 4 * kPointerSize));
9099 } else {
9100 ASSERT_EQ(2, result_size_);
9101 // Pass a pointer to the result location as the first argument.
9102 __ lea(rcx, Operand(rsp, 6 * kPointerSize));
9103 // Pass a pointer to the Arguments object as the second argument.
9104 __ lea(rdx, Operand(rsp, 4 * kPointerSize));
9105 }
9106
Steve Block6ded16b2010-05-10 14:33:55 +01009107#else // _WIN64
Steve Blocka7e24c12009-10-30 11:49:00 +00009108 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
9109 __ movq(rdi, r14); // argc.
9110 __ movq(rsi, r15); // argv.
9111#endif
9112 __ call(rbx);
9113 // Result is in rax - do not destroy this register!
9114
9115 if (always_allocate_scope) {
9116 __ movq(kScratchRegister, scope_depth);
9117 __ decl(Operand(kScratchRegister, 0));
9118 }
9119
9120 // Check for failure result.
9121 Label failure_returned;
9122 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
Steve Block3ce2e202009-11-05 08:53:23 +00009123#ifdef _WIN64
9124 // If return value is on the stack, pop it to registers.
9125 if (result_size_ > 1) {
9126 ASSERT_EQ(2, result_size_);
Steve Blockd0582a62009-12-15 09:54:21 +00009127 // Read result values stored on stack. Result is stored
9128 // above the four argument mirror slots and the two
9129 // Arguments object slots.
Steve Block3ce2e202009-11-05 08:53:23 +00009130 __ movq(rax, Operand(rsp, 6 * kPointerSize));
9131 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
9132 }
9133#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00009134 __ lea(rcx, Operand(rax, 1));
9135 // Lower 2 bits of rcx are 0 iff rax has failure tag.
9136 __ testl(rcx, Immediate(kFailureTagMask));
9137 __ j(zero, &failure_returned);
9138
9139 // Exit the JavaScript to C++ exit frame.
Leon Clarke4515c472010-02-03 11:58:03 +00009140 __ LeaveExitFrame(mode_, result_size_);
Steve Blocka7e24c12009-10-30 11:49:00 +00009141 __ ret(0);
9142
9143 // Handling of failure.
9144 __ bind(&failure_returned);
9145
9146 Label retry;
9147  // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
9148 ASSERT(Failure::RETRY_AFTER_GC == 0);
9149 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
9150 __ j(zero, &retry);
9151
9152 // Special handling of out of memory exceptions.
9153 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
9154 __ cmpq(rax, kScratchRegister);
9155 __ j(equal, throw_out_of_memory_exception);
9156
9157 // Retrieve the pending exception and clear the variable.
9158 ExternalReference pending_exception_address(Top::k_pending_exception_address);
9159 __ movq(kScratchRegister, pending_exception_address);
9160 __ movq(rax, Operand(kScratchRegister, 0));
9161 __ movq(rdx, ExternalReference::the_hole_value_location());
9162 __ movq(rdx, Operand(rdx, 0));
9163 __ movq(Operand(kScratchRegister, 0), rdx);
9164
9165 // Special handling of termination exceptions which are uncatchable
9166  // by JavaScript code.
9167 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
9168 __ j(equal, throw_termination_exception);
9169
9170 // Handle normal exception.
9171 __ jmp(throw_normal_exception);
9172
9173 // Retry.
9174 __ bind(&retry);
9175}
9176
9177
9178void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
9179 UncatchableExceptionType type) {
9180 // Fetch top stack handler.
9181 ExternalReference handler_address(Top::k_handler_address);
9182 __ movq(kScratchRegister, handler_address);
9183 __ movq(rsp, Operand(kScratchRegister, 0));
9184
9185 // Unwind the handlers until the ENTRY handler is found.
9186 Label loop, done;
9187 __ bind(&loop);
9188 // Load the type of the current stack handler.
9189 const int kStateOffset = StackHandlerConstants::kStateOffset;
9190 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
9191 __ j(equal, &done);
9192 // Fetch the next handler in the list.
9193 const int kNextOffset = StackHandlerConstants::kNextOffset;
9194 __ movq(rsp, Operand(rsp, kNextOffset));
9195 __ jmp(&loop);
9196 __ bind(&done);
9197
9198  // Set the top handler address to the next handler past the ENTRY handler.
9199 __ movq(kScratchRegister, handler_address);
9200 __ pop(Operand(kScratchRegister, 0));
9201
9202 if (type == OUT_OF_MEMORY) {
9203 // Set external caught exception to false.
9204 ExternalReference external_caught(Top::k_external_caught_exception_address);
9205 __ movq(rax, Immediate(false));
9206 __ store_rax(external_caught);
9207
9208 // Set pending exception and rax to out of memory exception.
9209 ExternalReference pending_exception(Top::k_pending_exception_address);
9210 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
9211 __ store_rax(pending_exception);
9212 }
9213
9214 // Clear the context pointer.
9215 __ xor_(rsi, rsi);
9216
9217 // Restore registers from handler.
9218 ASSERT_EQ(StackHandlerConstants::kNextOffset + kPointerSize,
9219 StackHandlerConstants::kFPOffset);
9220 __ pop(rbp); // FP
9221 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize,
9222 StackHandlerConstants::kStateOffset);
9223 __ pop(rdx); // State
9224
9225 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize,
9226 StackHandlerConstants::kPCOffset);
9227 __ ret(0);
9228}
9229
9230
9231void CallFunctionStub::Generate(MacroAssembler* masm) {
9232 Label slow;
9233
Leon Clarkee46be812010-01-19 14:06:41 +00009234  // If the receiver might be a value (string, number or boolean), check for
9235  // this and box it if it is.
9236 if (ReceiverMightBeValue()) {
9237 // Get the receiver from the stack.
9238 // +1 ~ return address
9239 Label receiver_is_value, receiver_is_js_object;
9240 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
9241
9242 // Check if receiver is a smi (which is a number value).
9243 __ JumpIfSmi(rax, &receiver_is_value);
9244
9245 // Check if the receiver is a valid JS object.
9246 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
9247 __ j(above_equal, &receiver_is_js_object);
9248
9249 // Call the runtime to box the value.
9250 __ bind(&receiver_is_value);
9251 __ EnterInternalFrame();
9252 __ push(rax);
9253 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
9254 __ LeaveInternalFrame();
9255 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);
9256
9257 __ bind(&receiver_is_js_object);
9258 }
9259
Steve Blocka7e24c12009-10-30 11:49:00 +00009260 // Get the function to call from the stack.
9261 // +2 ~ receiver, return address
9262 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
9263
9264 // Check that the function really is a JavaScript function.
9265 __ JumpIfSmi(rdi, &slow);
9266 // Goto slow case if we do not have a function.
9267 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
9268 __ j(not_equal, &slow);
9269
9270 // Fast-case: Just invoke the function.
9271 ParameterCount actual(argc_);
9272 __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
9273
9274 // Slow-case: Non-function called.
9275 __ bind(&slow);
Andrei Popescu402d9372010-02-26 13:31:12 +00009276 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
9277 // of the original receiver from the call site).
9278 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
Steve Blocka7e24c12009-10-30 11:49:00 +00009279 __ Set(rax, argc_);
9280 __ Set(rbx, 0);
9281 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
9282 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
9283 __ Jump(adaptor, RelocInfo::CODE_TARGET);
9284}
9285
9286
Leon Clarke4515c472010-02-03 11:58:03 +00009287void CEntryStub::Generate(MacroAssembler* masm) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009288 // rax: number of arguments including receiver
9289 // rbx: pointer to C function (C callee-saved)
9290 // rbp: frame pointer of calling JS frame (restored after C call)
9291 // rsp: stack pointer (restored after C call)
9292 // rsi: current context (restored)
9293
9294 // NOTE: Invocations of builtins may return failure objects
9295 // instead of a proper result. The builtin entry handles
9296 // this by performing a garbage collection and retrying the
9297 // builtin once.
9298
Steve Blocka7e24c12009-10-30 11:49:00 +00009299 // Enter the exit frame that transitions from JavaScript to C++.
Leon Clarke4515c472010-02-03 11:58:03 +00009300 __ EnterExitFrame(mode_, result_size_);
Steve Blocka7e24c12009-10-30 11:49:00 +00009301
9302 // rax: Holds the context at this point, but should not be used.
9303 // On entry to code generated by GenerateCore, it must hold
9304 // a failure result if the collect_garbage argument to GenerateCore
9305 // is true. This failure result can be the result of code
9306 // generated by a previous call to GenerateCore. The value
9307 // of rax is then passed to Runtime::PerformGC.
9308 // rbx: pointer to builtin function (C callee-saved).
9309 // rbp: frame pointer of exit frame (restored after C call).
9310 // rsp: stack pointer (restored after C call).
9311 // r14: number of arguments including receiver (C callee-saved).
9312 // r15: argv pointer (C callee-saved).
9313
9314 Label throw_normal_exception;
9315 Label throw_termination_exception;
9316 Label throw_out_of_memory_exception;
9317
9318 // Call into the runtime system.
9319 GenerateCore(masm,
9320 &throw_normal_exception,
9321 &throw_termination_exception,
9322 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009323 false,
9324 false);
9325
9326 // Do space-specific GC and retry runtime call.
9327 GenerateCore(masm,
9328 &throw_normal_exception,
9329 &throw_termination_exception,
9330 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009331 true,
9332 false);
9333
9334 // Do full GC and retry runtime call one final time.
9335 Failure* failure = Failure::InternalError();
9336 __ movq(rax, failure, RelocInfo::NONE);
9337 GenerateCore(masm,
9338 &throw_normal_exception,
9339 &throw_termination_exception,
9340 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009341 true,
9342 true);
9343
9344 __ bind(&throw_out_of_memory_exception);
9345 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
9346
9347 __ bind(&throw_termination_exception);
9348 GenerateThrowUncatchable(masm, TERMINATION);
9349
9350 __ bind(&throw_normal_exception);
9351 GenerateThrowTOS(masm);
9352}
9353
9354
Steve Blockd0582a62009-12-15 09:54:21 +00009355void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
9356 UNREACHABLE();
9357}
9358
9359
Steve Blocka7e24c12009-10-30 11:49:00 +00009360void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
9361 Label invoke, exit;
9362#ifdef ENABLE_LOGGING_AND_PROFILING
9363 Label not_outermost_js, not_outermost_js_2;
9364#endif
9365
9366 // Setup frame.
9367 __ push(rbp);
9368 __ movq(rbp, rsp);
9369
9370 // Push the stack frame type marker twice.
9371 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
Steve Block3ce2e202009-11-05 08:53:23 +00009372 __ Push(Smi::FromInt(marker)); // context slot
9373 __ Push(Smi::FromInt(marker)); // function slot
Steve Blocka7e24c12009-10-30 11:49:00 +00009374 // Save callee-saved registers (X64 calling conventions).
9375 __ push(r12);
9376 __ push(r13);
9377 __ push(r14);
9378 __ push(r15);
9379 __ push(rdi);
9380 __ push(rsi);
9381 __ push(rbx);
9382 // TODO(X64): Push XMM6-XMM15 (low 64 bits) as well, or make them
9383 // callee-save in JS code as well.
9384
9385 // Save copies of the top frame descriptor on the stack.
9386 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
9387 __ load_rax(c_entry_fp);
9388 __ push(rax);
9389
9390#ifdef ENABLE_LOGGING_AND_PROFILING
9391 // If this is the outermost JS call, set js_entry_sp value.
9392 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
9393 __ load_rax(js_entry_sp);
9394 __ testq(rax, rax);
9395 __ j(not_zero, &not_outermost_js);
9396 __ movq(rax, rbp);
9397 __ store_rax(js_entry_sp);
9398 __ bind(&not_outermost_js);
9399#endif
9400
9401 // Call a faked try-block that does the invoke.
9402 __ call(&invoke);
9403
9404 // Caught exception: Store result (exception) in the pending
9405 // exception field in the JSEnv and return a failure sentinel.
9406 ExternalReference pending_exception(Top::k_pending_exception_address);
9407 __ store_rax(pending_exception);
9408 __ movq(rax, Failure::Exception(), RelocInfo::NONE);
9409 __ jmp(&exit);
9410
9411 // Invoke: Link this frame into the handler chain.
9412 __ bind(&invoke);
9413 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
9414
9415 // Clear any pending exceptions.
9416 __ load_rax(ExternalReference::the_hole_value_location());
9417 __ store_rax(pending_exception);
9418
9419 // Fake a receiver (NULL).
9420 __ push(Immediate(0)); // receiver
9421
9422 // Invoke the function by calling through JS entry trampoline
9423 // builtin and pop the faked function when we return. We load the address
9424 // from an external reference instead of inlining the call target address
9425 // directly in the code, because the builtin stubs may not have been
9426 // generated yet at the time this code is generated.
9427 if (is_construct) {
9428 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
9429 __ load_rax(construct_entry);
9430 } else {
9431 ExternalReference entry(Builtins::JSEntryTrampoline);
9432 __ load_rax(entry);
9433 }
9434 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
9435 __ call(kScratchRegister);
9436
9437 // Unlink this frame from the handler chain.
9438 __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
9439 __ pop(Operand(kScratchRegister, 0));
9440 // Pop next_sp.
9441 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
9442
9443#ifdef ENABLE_LOGGING_AND_PROFILING
9444  // If the current rbp value is the same as the js_entry_sp value, it means
9445  // that the current function is the outermost.
9446 __ movq(kScratchRegister, js_entry_sp);
9447 __ cmpq(rbp, Operand(kScratchRegister, 0));
9448 __ j(not_equal, &not_outermost_js_2);
9449 __ movq(Operand(kScratchRegister, 0), Immediate(0));
9450 __ bind(&not_outermost_js_2);
9451#endif
9452
9453 // Restore the top frame descriptor from the stack.
9454 __ bind(&exit);
9455 __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
9456 __ pop(Operand(kScratchRegister, 0));
9457
9458 // Restore callee-saved registers (X64 conventions).
9459 __ pop(rbx);
9460 __ pop(rsi);
9461 __ pop(rdi);
9462 __ pop(r15);
9463 __ pop(r14);
9464 __ pop(r13);
9465 __ pop(r12);
9466 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
9467
9468 // Restore frame pointer and return.
9469 __ pop(rbp);
9470 __ ret(0);
9471}
9472
9473
9474// -----------------------------------------------------------------------------
9475// Implementation of stubs.
9476
9477// Stub classes have a public member named masm, not masm_.
9478
9479void StackCheckStub::Generate(MacroAssembler* masm) {
9480 // Because builtins always remove the receiver from the stack, we
9481 // have to fake one to avoid underflowing the stack. The receiver
9482 // must be inserted below the return address on the stack so we
9483  // temporarily store the return address in a register.
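  // Sketch: on entry the stack top is [return address | caller data]; after
  // the pop/push sequence below it is [return address | Smi(0) | caller
  // data], so a fake receiver sits just below the return address.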
9484 __ pop(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00009485 __ Push(Smi::FromInt(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00009486 __ push(rax);
9487
9488 // Do tail-call to runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +01009489 __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009490}
9491
9492
Steve Blocka7e24c12009-10-30 11:49:00 +00009493void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
9494 Register number) {
9495 Label load_smi, done;
9496
9497 __ JumpIfSmi(number, &load_smi);
9498 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
9499 __ jmp(&done);
9500
9501 __ bind(&load_smi);
9502 __ SmiToInteger32(number, number);
9503 __ push(number);
9504 __ fild_s(Operand(rsp, 0));
9505 __ pop(number);
9506
9507 __ bind(&done);
9508}
9509
9510
9511void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
9512 Register src,
9513 XMMRegister dst) {
9514 Label load_smi, done;
9515
9516 __ JumpIfSmi(src, &load_smi);
9517 __ movsd(dst, FieldOperand(src, HeapNumber::kValueOffset));
9518 __ jmp(&done);
9519
9520 __ bind(&load_smi);
9521 __ SmiToInteger32(src, src);
9522 __ cvtlsi2sd(dst, src);
9523
9524 __ bind(&done);
9525}
9526
9527
Steve Block6ded16b2010-05-10 14:33:55 +01009528void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
9529 Register src,
9530 XMMRegister dst,
9531 Label* not_number) {
9532 Label load_smi, done;
9533 ASSERT(!src.is(kScratchRegister));
9534 __ JumpIfSmi(src, &load_smi);
9535 __ LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
9536 __ cmpq(FieldOperand(src, HeapObject::kMapOffset), kScratchRegister);
9537 __ j(not_equal, not_number);
9538 __ movsd(dst, FieldOperand(src, HeapNumber::kValueOffset));
9539 __ jmp(&done);
9540
9541 __ bind(&load_smi);
9542 __ SmiToInteger32(kScratchRegister, src);
9543 __ cvtlsi2sd(dst, kScratchRegister);
9544
9545 __ bind(&done);
9546}
9547
9548
Steve Blocka7e24c12009-10-30 11:49:00 +00009549void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
9550 XMMRegister dst1,
9551 XMMRegister dst2) {
Leon Clarke4515c472010-02-03 11:58:03 +00009552 __ movq(kScratchRegister, rdx);
Steve Blocka7e24c12009-10-30 11:49:00 +00009553 LoadFloatOperand(masm, kScratchRegister, dst1);
Leon Clarke4515c472010-02-03 11:58:03 +00009554 __ movq(kScratchRegister, rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009555 LoadFloatOperand(masm, kScratchRegister, dst2);
9556}
9557
9558
Leon Clarke4515c472010-02-03 11:58:03 +00009559void FloatingPointHelper::LoadFloatOperandsFromSmis(MacroAssembler* masm,
9560 XMMRegister dst1,
9561 XMMRegister dst2) {
9562 __ SmiToInteger32(kScratchRegister, rdx);
9563 __ cvtlsi2sd(dst1, kScratchRegister);
9564 __ SmiToInteger32(kScratchRegister, rax);
9565 __ cvtlsi2sd(dst2, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00009566}
9567
9568
Leon Clarked91b9f72010-01-27 17:25:45 +00009569// Input: rdx, rax are the left and right objects of a bit op.
9570// Output: rax, rcx are left and right integers for a bit op.
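// For example, when evaluating (a | b) the caller is assumed to have loaded
// a into rdx and b into rax; on return rax holds the left operand as an
// untagged integer and rcx holds the right one, ready for the bitwise op.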
9571void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
9572 bool use_sse3,
9573 Label* conversion_failure) {
9574 // Check float operands.
9575 Label arg1_is_object, check_undefined_arg1;
9576 Label arg2_is_object, check_undefined_arg2;
9577 Label load_arg2, done;
9578
9579 __ JumpIfNotSmi(rdx, &arg1_is_object);
9580 __ SmiToInteger32(rdx, rdx);
9581 __ jmp(&load_arg2);
9582
9583 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
9584 __ bind(&check_undefined_arg1);
9585 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
9586 __ j(not_equal, conversion_failure);
9587 __ movl(rdx, Immediate(0));
9588 __ jmp(&load_arg2);
9589
9590 __ bind(&arg1_is_object);
9591 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
9592 __ CompareRoot(rbx, Heap::kHeapNumberMapRootIndex);
9593 __ j(not_equal, &check_undefined_arg1);
9594  // Get the untagged integer version of the heap number in rdx into rcx.
9595 IntegerConvert(masm, rdx, use_sse3, conversion_failure);
9596 __ movl(rdx, rcx);
9597
9598  // Here rdx has the untagged integer, rax has a Smi or a heap number.
9599 __ bind(&load_arg2);
9600 // Test if arg2 is a Smi.
9601 __ JumpIfNotSmi(rax, &arg2_is_object);
9602 __ SmiToInteger32(rax, rax);
9603 __ movl(rcx, rax);
9604 __ jmp(&done);
9605
9606 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
9607 __ bind(&check_undefined_arg2);
9608 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
9609 __ j(not_equal, conversion_failure);
9610 __ movl(rcx, Immediate(0));
9611 __ jmp(&done);
9612
9613 __ bind(&arg2_is_object);
9614 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
9615 __ CompareRoot(rbx, Heap::kHeapNumberMapRootIndex);
9616 __ j(not_equal, &check_undefined_arg2);
9617  // Get the untagged integer version of the heap number in rax into rcx.
9618 IntegerConvert(masm, rax, use_sse3, conversion_failure);
9619 __ bind(&done);
9620 __ movl(rax, rdx);
9621}
9622
9623
Steve Blocka7e24c12009-10-30 11:49:00 +00009624void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
9625 Register lhs,
9626 Register rhs) {
9627 Label load_smi_lhs, load_smi_rhs, done_load_lhs, done;
9628 __ JumpIfSmi(lhs, &load_smi_lhs);
9629 __ fld_d(FieldOperand(lhs, HeapNumber::kValueOffset));
9630 __ bind(&done_load_lhs);
9631
9632 __ JumpIfSmi(rhs, &load_smi_rhs);
9633 __ fld_d(FieldOperand(rhs, HeapNumber::kValueOffset));
9634 __ jmp(&done);
9635
9636 __ bind(&load_smi_lhs);
9637 __ SmiToInteger64(kScratchRegister, lhs);
9638 __ push(kScratchRegister);
9639 __ fild_d(Operand(rsp, 0));
9640 __ pop(kScratchRegister);
9641 __ jmp(&done_load_lhs);
9642
9643 __ bind(&load_smi_rhs);
9644 __ SmiToInteger64(kScratchRegister, rhs);
9645 __ push(kScratchRegister);
9646 __ fild_d(Operand(rsp, 0));
9647 __ pop(kScratchRegister);
9648
9649 __ bind(&done);
9650}
9651
9652
Steve Block3ce2e202009-11-05 08:53:23 +00009653void FloatingPointHelper::CheckNumberOperands(MacroAssembler* masm,
9654 Label* non_float) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009655 Label test_other, done;
9656 // Test if both operands are numbers (heap_numbers or smis).
9657 // If not, jump to label non_float.
9658 __ JumpIfSmi(rdx, &test_other); // argument in rdx is OK
9659 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), Factory::heap_number_map());
9660 __ j(not_equal, non_float); // The argument in rdx is not a number.
9661
9662 __ bind(&test_other);
9663 __ JumpIfSmi(rax, &done); // argument in rax is OK
9664 __ Cmp(FieldOperand(rax, HeapObject::kMapOffset), Factory::heap_number_map());
9665 __ j(not_equal, non_float); // The argument in rax is not a number.
9666
9667 // Fall-through: Both operands are numbers.
9668 __ bind(&done);
9669}
9670
9671
9672const char* GenericBinaryOpStub::GetName() {
Leon Clarkee46be812010-01-19 14:06:41 +00009673 if (name_ != NULL) return name_;
9674 const int len = 100;
9675 name_ = Bootstrapper::AllocateAutoDeletedArray(len);
9676 if (name_ == NULL) return "OOM";
9677 const char* op_name = Token::Name(op_);
9678 const char* overwrite_name;
9679 switch (mode_) {
9680 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
9681 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
9682 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
9683 default: overwrite_name = "UnknownOverwrite"; break;
Steve Blocka7e24c12009-10-30 11:49:00 +00009684 }
Leon Clarkee46be812010-01-19 14:06:41 +00009685
9686 OS::SNPrintF(Vector<char>(name_, len),
Steve Block6ded16b2010-05-10 14:33:55 +01009687 "GenericBinaryOpStub_%s_%s%s_%s%s_%s%s_%s",
Leon Clarkee46be812010-01-19 14:06:41 +00009688 op_name,
9689 overwrite_name,
9690 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
9691 args_in_registers_ ? "RegArgs" : "StackArgs",
9692 args_reversed_ ? "_R" : "",
Andrei Popescu402d9372010-02-26 13:31:12 +00009693 use_sse3_ ? "SSE3" : "SSE2",
Steve Block6ded16b2010-05-10 14:33:55 +01009694 static_operands_type_.ToString(),
9695 BinaryOpIC::GetName(runtime_operands_type_));
Leon Clarkee46be812010-01-19 14:06:41 +00009696 return name_;
Steve Blocka7e24c12009-10-30 11:49:00 +00009697}
9698
9699
Steve Blockd0582a62009-12-15 09:54:21 +00009700void GenericBinaryOpStub::GenerateCall(
9701 MacroAssembler* masm,
9702 Register left,
9703 Register right) {
9704 if (!ArgsInRegistersSupported()) {
9705 // Pass arguments on the stack.
9706 __ push(left);
9707 __ push(right);
9708 } else {
9709 // The calling convention with registers is left in rdx and right in rax.
9710 Register left_arg = rdx;
9711 Register right_arg = rax;
9712 if (!(left.is(left_arg) && right.is(right_arg))) {
9713 if (left.is(right_arg) && right.is(left_arg)) {
9714 if (IsOperationCommutative()) {
9715 SetArgsReversed();
9716 } else {
9717 __ xchg(left, right);
9718 }
9719 } else if (left.is(left_arg)) {
9720 __ movq(right_arg, right);
Andrei Popescu402d9372010-02-26 13:31:12 +00009721 } else if (right.is(right_arg)) {
9722 __ movq(left_arg, left);
Steve Blockd0582a62009-12-15 09:54:21 +00009723 } else if (left.is(right_arg)) {
9724 if (IsOperationCommutative()) {
9725 __ movq(left_arg, right);
9726 SetArgsReversed();
9727 } else {
9728 // Order of moves important to avoid destroying left argument.
9729 __ movq(left_arg, left);
9730 __ movq(right_arg, right);
9731 }
9732 } else if (right.is(left_arg)) {
9733 if (IsOperationCommutative()) {
9734 __ movq(right_arg, left);
9735 SetArgsReversed();
9736 } else {
9737 // Order of moves important to avoid destroying right argument.
9738 __ movq(right_arg, right);
9739 __ movq(left_arg, left);
9740 }
Steve Blockd0582a62009-12-15 09:54:21 +00009741 } else {
9742 // Order of moves is not important.
9743 __ movq(left_arg, left);
9744 __ movq(right_arg, right);
9745 }
9746 }
9747
9748 // Update flags to indicate that arguments are in registers.
9749 SetArgsInRegisters();
9750 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
9751 }
9752
9753 // Call the stub.
9754 __ CallStub(this);
9755}
9756
9757
9758void GenericBinaryOpStub::GenerateCall(
9759 MacroAssembler* masm,
9760 Register left,
9761 Smi* right) {
9762 if (!ArgsInRegistersSupported()) {
9763 // Pass arguments on the stack.
9764 __ push(left);
9765 __ Push(right);
9766 } else {
9767 // The calling convention with registers is left in rdx and right in rax.
9768 Register left_arg = rdx;
9769 Register right_arg = rax;
9770 if (left.is(left_arg)) {
9771 __ Move(right_arg, right);
9772 } else if (left.is(right_arg) && IsOperationCommutative()) {
9773 __ Move(left_arg, right);
9774 SetArgsReversed();
9775 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00009776 // For non-commutative operations, left and right_arg might be
9777 // the same register. Therefore, the order of the moves is
9778 // important here in order to not overwrite left before moving
9779 // it to left_arg.
Steve Blockd0582a62009-12-15 09:54:21 +00009780 __ movq(left_arg, left);
9781 __ Move(right_arg, right);
9782 }
9783
9784 // Update flags to indicate that arguments are in registers.
9785 SetArgsInRegisters();
9786 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
9787 }
9788
9789 // Call the stub.
9790 __ CallStub(this);
9791}
9792
9793
9794void GenericBinaryOpStub::GenerateCall(
9795 MacroAssembler* masm,
9796 Smi* left,
9797 Register right) {
9798 if (!ArgsInRegistersSupported()) {
9799 // Pass arguments on the stack.
9800 __ Push(left);
9801 __ push(right);
9802 } else {
9803 // The calling convention with registers is left in rdx and right in rax.
9804 Register left_arg = rdx;
9805 Register right_arg = rax;
9806 if (right.is(right_arg)) {
9807 __ Move(left_arg, left);
9808 } else if (right.is(left_arg) && IsOperationCommutative()) {
9809 __ Move(right_arg, left);
9810 SetArgsReversed();
9811 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00009812 // For non-commutative operations, right and left_arg might be
9813 // the same register. Therefore, the order of the moves is
9814 // important here in order to not overwrite right before moving
9815 // it to right_arg.
Steve Blockd0582a62009-12-15 09:54:21 +00009816 __ movq(right_arg, right);
Andrei Popescu402d9372010-02-26 13:31:12 +00009817 __ Move(left_arg, left);
Steve Blockd0582a62009-12-15 09:54:21 +00009818 }
9819 // Update flags to indicate that arguments are in registers.
9820 SetArgsInRegisters();
9821 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
9822 }
9823
9824 // Call the stub.
9825 __ CallStub(this);
9826}
9827
9828
Leon Clarke4515c472010-02-03 11:58:03 +00009829Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm,
9830 VirtualFrame* frame,
9831 Result* left,
9832 Result* right) {
9833 if (ArgsInRegistersSupported()) {
9834 SetArgsInRegisters();
9835 return frame->CallStub(this, left, right);
9836 } else {
9837 frame->Push(left);
9838 frame->Push(right);
9839 return frame->CallStub(this, 2);
9840 }
9841}
9842
9843
Steve Blocka7e24c12009-10-30 11:49:00 +00009844void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
Steve Block6ded16b2010-05-10 14:33:55 +01009845 // 1. Move arguments into rdx, rax except for DIV and MOD, which need the
9846 // dividend in rax and rdx free for the division. Use rax, rbx for those.
Leon Clarke4515c472010-02-03 11:58:03 +00009847 Comment load_comment(masm, "-- Load arguments");
9848 Register left = rdx;
9849 Register right = rax;
9850 if (op_ == Token::DIV || op_ == Token::MOD) {
9851 left = rax;
9852 right = rbx;
9853 if (HasArgsInRegisters()) {
9854 __ movq(rbx, rax);
9855 __ movq(rax, rdx);
9856 }
9857 }
9858 if (!HasArgsInRegisters()) {
9859 __ movq(right, Operand(rsp, 1 * kPointerSize));
9860 __ movq(left, Operand(rsp, 2 * kPointerSize));
9861 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009862
Leon Clarke4515c472010-02-03 11:58:03 +00009863 // 2. Smi check both operands. Skip the check for OR as it is better combined
9864 // with the actual operation.
9865 Label not_smis;
9866 if (op_ != Token::BIT_OR) {
9867 Comment smi_check_comment(masm, "-- Smi check arguments");
9868 __ JumpIfNotBothSmi(left, right, &not_smis);
9869 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009870
Leon Clarke4515c472010-02-03 11:58:03 +00009871 // 3. Operands are both smis (except for OR), perform the operation leaving
9872 // the result in rax and check the result if necessary.
9873 Comment perform_smi(masm, "-- Perform smi operation");
9874 Label use_fp_on_smis;
Steve Blocka7e24c12009-10-30 11:49:00 +00009875 switch (op_) {
9876 case Token::ADD: {
Leon Clarke4515c472010-02-03 11:58:03 +00009877 ASSERT(right.is(rax));
9878 __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +00009879 break;
9880 }
9881
9882 case Token::SUB: {
Leon Clarke4515c472010-02-03 11:58:03 +00009883 __ SmiSub(left, left, right, &use_fp_on_smis);
9884 __ movq(rax, left);
Steve Blocka7e24c12009-10-30 11:49:00 +00009885 break;
9886 }
9887
9888 case Token::MUL:
Leon Clarke4515c472010-02-03 11:58:03 +00009889 ASSERT(right.is(rax));
9890 __ SmiMul(right, right, left, &use_fp_on_smis); // MUL is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +00009891 break;
9892
9893 case Token::DIV:
Leon Clarke4515c472010-02-03 11:58:03 +00009894 ASSERT(left.is(rax));
9895 __ SmiDiv(left, left, right, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00009896 break;
9897
9898 case Token::MOD:
Leon Clarke4515c472010-02-03 11:58:03 +00009899 ASSERT(left.is(rax));
9900 __ SmiMod(left, left, right, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00009901 break;
9902
9903 case Token::BIT_OR:
Leon Clarke4515c472010-02-03 11:58:03 +00009904 ASSERT(right.is(rax));
9905 __ movq(rcx, right); // Save the right operand.
9906 __ SmiOr(right, right, left); // BIT_OR is commutative.
9907 __ testb(right, Immediate(kSmiTagMask));
9908 __ j(not_zero, &not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00009909 break;
9910
9911 case Token::BIT_AND:
Leon Clarke4515c472010-02-03 11:58:03 +00009912 ASSERT(right.is(rax));
9913 __ SmiAnd(right, right, left); // BIT_AND is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +00009914 break;
9915
9916 case Token::BIT_XOR:
Leon Clarke4515c472010-02-03 11:58:03 +00009917 ASSERT(right.is(rax));
9918 __ SmiXor(right, right, left); // BIT_XOR is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +00009919 break;
9920
9921 case Token::SHL:
9922 case Token::SHR:
9923 case Token::SAR:
Steve Blocka7e24c12009-10-30 11:49:00 +00009924 switch (op_) {
9925 case Token::SAR:
Leon Clarke4515c472010-02-03 11:58:03 +00009926 __ SmiShiftArithmeticRight(left, left, right);
Steve Blocka7e24c12009-10-30 11:49:00 +00009927 break;
9928 case Token::SHR:
Leon Clarke4515c472010-02-03 11:58:03 +00009929 __ SmiShiftLogicalRight(left, left, right, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00009930 break;
9931 case Token::SHL:
Leon Clarke4515c472010-02-03 11:58:03 +00009932 __ SmiShiftLeft(left, left, right, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00009933 break;
9934 default:
9935 UNREACHABLE();
9936 }
Leon Clarke4515c472010-02-03 11:58:03 +00009937 __ movq(rax, left);
Steve Blocka7e24c12009-10-30 11:49:00 +00009938 break;
9939
9940 default:
9941 UNREACHABLE();
9942 break;
9943 }
Leon Clarke4515c472010-02-03 11:58:03 +00009944
Steve Block6ded16b2010-05-10 14:33:55 +01009945 // 4. Emit return of result in rax.
Leon Clarke4515c472010-02-03 11:58:03 +00009946 GenerateReturn(masm);
9947
9948 // 5. For some operations emit inline code to perform floating point
9949 // operations on known smis (e.g., if the result of the operation
9950 // overflowed the smi range).
9951 switch (op_) {
9952 case Token::ADD:
9953 case Token::SUB:
9954 case Token::MUL:
9955 case Token::DIV: {
9956 __ bind(&use_fp_on_smis);
9957 if (op_ == Token::DIV) {
9958 __ movq(rdx, rax);
9959 __ movq(rax, rbx);
9960 }
9961 // left is rdx, right is rax.
9962 __ AllocateHeapNumber(rbx, rcx, slow);
9963 FloatingPointHelper::LoadFloatOperandsFromSmis(masm, xmm4, xmm5);
9964 switch (op_) {
9965 case Token::ADD: __ addsd(xmm4, xmm5); break;
9966 case Token::SUB: __ subsd(xmm4, xmm5); break;
9967 case Token::MUL: __ mulsd(xmm4, xmm5); break;
9968 case Token::DIV: __ divsd(xmm4, xmm5); break;
9969 default: UNREACHABLE();
9970 }
9971 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm4);
9972 __ movq(rax, rbx);
9973 GenerateReturn(masm);
9974 }
9975 default:
9976 break;
9977 }
9978
9979 // 6. Non-smi operands, fall out to the non-smi code with the operands in
9980 // rdx and rax.
9981 Comment done_comment(masm, "-- Enter non-smi code");
9982 __ bind(&not_smis);
9983
9984 switch (op_) {
9985 case Token::DIV:
9986 case Token::MOD:
9987 // Operands are in rax, rbx at this point.
9988 __ movq(rdx, rax);
9989 __ movq(rax, rbx);
9990 break;
9991
9992 case Token::BIT_OR:
9993 // Right operand is saved in rcx and rax was destroyed by the smi
9994 // operation.
9995 __ movq(rax, rcx);
9996 break;
9997
9998 default:
9999 break;
10000 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010001}
10002
10003
10004void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
10005 Label call_runtime;
Steve Block6ded16b2010-05-10 14:33:55 +010010006
10007 if (ShouldGenerateSmiCode()) {
Leon Clarke4515c472010-02-03 11:58:03 +000010008 GenerateSmiCode(masm, &call_runtime);
10009 } else if (op_ != Token::MOD) {
Steve Block6ded16b2010-05-10 14:33:55 +010010010 if (!HasArgsInRegisters()) {
10011 GenerateLoadArguments(masm);
10012 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010013 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010014 // Floating point case.
Steve Block6ded16b2010-05-10 14:33:55 +010010015 if (ShouldGenerateFPCode()) {
10016 switch (op_) {
10017 case Token::ADD:
10018 case Token::SUB:
10019 case Token::MUL:
10020 case Token::DIV: {
10021 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
10022 HasSmiCodeInStub()) {
10023 // Execution reaches this point when the first non-smi argument occurs
10024 // (and only if smi code is generated). This is the right moment to
10025 // patch to HEAP_NUMBERS state. The transition is attempted only for
10026 // the four basic operations. The stub stays in the DEFAULT state
10027 // forever for all other operations (also if smi code is skipped).
10028 GenerateTypeTransition(masm);
Andrei Popescu402d9372010-02-26 13:31:12 +000010029 }
Steve Block6ded16b2010-05-10 14:33:55 +010010030
10031 Label not_floats;
10032 // rax: y
10033 // rdx: x
10034 if (static_operands_type_.IsNumber() && FLAG_debug_code) {
10035 // Assert at runtime that inputs are only numbers.
10036 __ AbortIfNotNumber(rdx, "GenericBinaryOpStub operand not a number.");
10037 __ AbortIfNotNumber(rax, "GenericBinaryOpStub operand not a number.");
Andrei Popescu402d9372010-02-26 13:31:12 +000010038 } else {
10039 FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
10040 }
Steve Block6ded16b2010-05-10 14:33:55 +010010041 // Fast-case: Both operands are numbers.
10042 // xmm4 and xmm5 are volatile XMM registers.
10043 FloatingPointHelper::LoadFloatOperands(masm, xmm4, xmm5);
Leon Clarke4515c472010-02-03 11:58:03 +000010044
Steve Block6ded16b2010-05-10 14:33:55 +010010045 switch (op_) {
10046 case Token::ADD: __ addsd(xmm4, xmm5); break;
10047 case Token::SUB: __ subsd(xmm4, xmm5); break;
10048 case Token::MUL: __ mulsd(xmm4, xmm5); break;
10049 case Token::DIV: __ divsd(xmm4, xmm5); break;
10050 default: UNREACHABLE();
Leon Clarke4515c472010-02-03 11:58:03 +000010051 }
Steve Block6ded16b2010-05-10 14:33:55 +010010052 // Allocate a heap number, if needed.
10053 Label skip_allocation;
10054 OverwriteMode mode = mode_;
10055 if (HasArgsReversed()) {
10056 if (mode == OVERWRITE_RIGHT) {
10057 mode = OVERWRITE_LEFT;
10058 } else if (mode == OVERWRITE_LEFT) {
10059 mode = OVERWRITE_RIGHT;
10060 }
10061 }
10062 switch (mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +000010063 case OVERWRITE_LEFT:
Steve Block6ded16b2010-05-10 14:33:55 +010010064 __ JumpIfNotSmi(rdx, &skip_allocation);
10065 __ AllocateHeapNumber(rbx, rcx, &call_runtime);
10066 __ movq(rdx, rbx);
10067 __ bind(&skip_allocation);
10068 __ movq(rax, rdx);
10069 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010070 case OVERWRITE_RIGHT:
Steve Block6ded16b2010-05-10 14:33:55 +010010071 // If the argument in rax is already an object, we skip the
Steve Blocka7e24c12009-10-30 11:49:00 +000010072 // allocation of a heap number.
Steve Blocka7e24c12009-10-30 11:49:00 +000010073 __ JumpIfNotSmi(rax, &skip_allocation);
10074 // Fall through!
10075 case NO_OVERWRITE:
Steve Block6ded16b2010-05-10 14:33:55 +010010076 // Allocate a heap number for the result. Keep rax and rdx intact
10077 // for the possible runtime call.
10078 __ AllocateHeapNumber(rbx, rcx, &call_runtime);
10079 __ movq(rax, rbx);
Steve Blocka7e24c12009-10-30 11:49:00 +000010080 __ bind(&skip_allocation);
10081 break;
10082 default: UNREACHABLE();
10083 }
Steve Block6ded16b2010-05-10 14:33:55 +010010084 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm4);
Steve Blockd0582a62009-12-15 09:54:21 +000010085 GenerateReturn(masm);
Steve Block6ded16b2010-05-10 14:33:55 +010010086 __ bind(&not_floats);
10087 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
10088 !HasSmiCodeInStub()) {
10089 // Execution reaches this point when the first non-number argument
10090 // occurs (and only if smi code is skipped from the stub, otherwise
10091 // the patching has already been done earlier in this case branch).
10092 // A perfect moment to try patching to STRINGS for ADD operation.
10093 if (op_ == Token::ADD) {
10094 GenerateTypeTransition(masm);
10095 }
10096 }
10097 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010098 }
Steve Block6ded16b2010-05-10 14:33:55 +010010099 case Token::MOD: {
10100 // For MOD we go directly to runtime in the non-smi case.
10101 break;
10102 }
10103 case Token::BIT_OR:
10104 case Token::BIT_AND:
10105 case Token::BIT_XOR:
10106 case Token::SAR:
10107 case Token::SHL:
10108 case Token::SHR: {
10109 Label skip_allocation, non_smi_result;
10110 FloatingPointHelper::LoadAsIntegers(masm, use_sse3_, &call_runtime);
10111 switch (op_) {
10112 case Token::BIT_OR: __ orl(rax, rcx); break;
10113 case Token::BIT_AND: __ andl(rax, rcx); break;
10114 case Token::BIT_XOR: __ xorl(rax, rcx); break;
10115 case Token::SAR: __ sarl_cl(rax); break;
10116 case Token::SHL: __ shll_cl(rax); break;
10117 case Token::SHR: __ shrl_cl(rax); break;
10118 default: UNREACHABLE();
10119 }
10120 if (op_ == Token::SHR) {
10121          // Check that the result is non-negative; it can only be negative for
10122          // a shift by zero, which also doesn't update the sign flag.
10123 __ testl(rax, rax);
10124 __ j(negative, &non_smi_result);
10125 }
10126 __ JumpIfNotValidSmiValue(rax, &non_smi_result);
10127 // Tag smi result, if possible, and return.
10128 __ Integer32ToSmi(rax, rax);
10129 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +000010130
Steve Block6ded16b2010-05-10 14:33:55 +010010131 // All ops except SHR return a signed int32 that we load in
10132 // a HeapNumber.
10133 if (op_ != Token::SHR && non_smi_result.is_linked()) {
10134 __ bind(&non_smi_result);
10135 // Allocate a heap number if needed.
10136 __ movsxlq(rbx, rax); // rbx: sign extended 32-bit result
10137 switch (mode_) {
10138 case OVERWRITE_LEFT:
10139 case OVERWRITE_RIGHT:
10140 // If the operand was an object, we skip the
10141 // allocation of a heap number.
10142 __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ?
10143 1 * kPointerSize : 2 * kPointerSize));
10144 __ JumpIfNotSmi(rax, &skip_allocation);
10145 // Fall through!
10146 case NO_OVERWRITE:
10147 __ AllocateHeapNumber(rax, rcx, &call_runtime);
10148 __ bind(&skip_allocation);
10149 break;
10150 default: UNREACHABLE();
10151 }
10152 // Store the result in the HeapNumber and return.
10153 __ movq(Operand(rsp, 1 * kPointerSize), rbx);
10154 __ fild_s(Operand(rsp, 1 * kPointerSize));
10155 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
10156 GenerateReturn(masm);
10157 }
10158
10159 // SHR should return uint32 - go to runtime for non-smi/negative result.
10160 if (op_ == Token::SHR) {
10161 __ bind(&non_smi_result);
10162 }
10163 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010164 }
Steve Block6ded16b2010-05-10 14:33:55 +010010165 default: UNREACHABLE(); break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010166 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010167 }
10168
10169 // If all else fails, use the runtime system to get the correct
Steve Blockd0582a62009-12-15 09:54:21 +000010170  // result. If arguments were passed in registers, place them on the
10171 // stack in the correct order below the return address.
Steve Blocka7e24c12009-10-30 11:49:00 +000010172 __ bind(&call_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010173
Leon Clarke4515c472010-02-03 11:58:03 +000010174 if (HasArgsInRegisters()) {
Steve Block6ded16b2010-05-10 14:33:55 +010010175 GenerateRegisterArgsPush(masm);
Steve Blockd0582a62009-12-15 09:54:21 +000010176 }
Steve Block6ded16b2010-05-10 14:33:55 +010010177
Steve Blocka7e24c12009-10-30 11:49:00 +000010178 switch (op_) {
Steve Blockd0582a62009-12-15 09:54:21 +000010179 case Token::ADD: {
Steve Block6ded16b2010-05-10 14:33:55 +010010180 // Registers containing left and right operands respectively.
10181 Register lhs, rhs;
10182
10183 if (HasArgsReversed()) {
10184 lhs = rax;
10185 rhs = rdx;
10186 } else {
10187 lhs = rdx;
10188 rhs = rax;
10189 }
10190
Steve Blockd0582a62009-12-15 09:54:21 +000010191 // Test for string arguments before calling runtime.
Steve Block6ded16b2010-05-10 14:33:55 +010010192 Label not_strings, both_strings, not_string1, string1, string1_smi2;
10193
10194 // If this stub has already generated FP-specific code then the arguments
10195 // are already in rdx, rax
10196 if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
10197 GenerateLoadArguments(masm);
10198 }
10199
Steve Blockd0582a62009-12-15 09:54:21 +000010200 Condition is_smi;
Steve Block6ded16b2010-05-10 14:33:55 +010010201 is_smi = masm->CheckSmi(lhs);
Steve Blockd0582a62009-12-15 09:54:21 +000010202 __ j(is_smi, &not_string1);
Steve Block6ded16b2010-05-10 14:33:55 +010010203 __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, r8);
Steve Blockd0582a62009-12-15 09:54:21 +000010204 __ j(above_equal, &not_string1);
10205
10206      // First argument is a string, test second.
Steve Block6ded16b2010-05-10 14:33:55 +010010207 is_smi = masm->CheckSmi(rhs);
10208 __ j(is_smi, &string1_smi2);
10209 __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
Steve Blockd0582a62009-12-15 09:54:21 +000010210 __ j(above_equal, &string1);
10211
10212 // First and second argument are strings.
Steve Block6ded16b2010-05-10 14:33:55 +010010213 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
10214 __ TailCallStub(&string_add_stub);
10215
10216 __ bind(&string1_smi2);
10217 // First argument is a string, second is a smi. Try to lookup the number
10218 // string for the smi in the number string cache.
10219 NumberToStringStub::GenerateLookupNumberStringCache(
10220 masm, rhs, rbx, rcx, r8, true, &string1);
10221
10222 // Replace second argument on stack and tailcall string add stub to make
10223 // the result.
10224 __ movq(Operand(rsp, 1 * kPointerSize), rbx);
10225 __ TailCallStub(&string_add_stub);
Steve Blockd0582a62009-12-15 09:54:21 +000010226
10227 // Only first argument is a string.
10228 __ bind(&string1);
Steve Block6ded16b2010-05-10 14:33:55 +010010229 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);
Steve Blockd0582a62009-12-15 09:54:21 +000010230
10231 // First argument was not a string, test second.
10232 __ bind(&not_string1);
Steve Block6ded16b2010-05-10 14:33:55 +010010233 is_smi = masm->CheckSmi(rhs);
Steve Blockd0582a62009-12-15 09:54:21 +000010234 __ j(is_smi, &not_strings);
Steve Block6ded16b2010-05-10 14:33:55 +010010235 __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, rhs);
Steve Blockd0582a62009-12-15 09:54:21 +000010236 __ j(above_equal, &not_strings);
10237
10238 // Only second argument is a string.
Steve Block6ded16b2010-05-10 14:33:55 +010010239 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);
Steve Blockd0582a62009-12-15 09:54:21 +000010240
10241 __ bind(&not_strings);
10242 // Neither argument is a string.
Steve Blocka7e24c12009-10-30 11:49:00 +000010243 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
10244 break;
Steve Blockd0582a62009-12-15 09:54:21 +000010245 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010246 case Token::SUB:
10247 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
10248 break;
10249 case Token::MUL:
10250 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
Leon Clarke4515c472010-02-03 11:58:03 +000010251 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010252 case Token::DIV:
10253 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
10254 break;
10255 case Token::MOD:
10256 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
10257 break;
10258 case Token::BIT_OR:
10259 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
10260 break;
10261 case Token::BIT_AND:
10262 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
10263 break;
10264 case Token::BIT_XOR:
10265 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
10266 break;
10267 case Token::SAR:
10268 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
10269 break;
10270 case Token::SHL:
10271 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
10272 break;
10273 case Token::SHR:
10274 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
10275 break;
10276 default:
10277 UNREACHABLE();
10278 }
10279}
10280
10281
Steve Blockd0582a62009-12-15 09:54:21 +000010282void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +010010283 ASSERT(!HasArgsInRegisters());
10284 __ movq(rax, Operand(rsp, 1 * kPointerSize));
10285 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
Steve Blockd0582a62009-12-15 09:54:21 +000010286}
10287
10288
10289void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
10290 // If arguments are not passed in registers remove them from the stack before
10291 // returning.
Leon Clarke4515c472010-02-03 11:58:03 +000010292 if (!HasArgsInRegisters()) {
Steve Blockd0582a62009-12-15 09:54:21 +000010293 __ ret(2 * kPointerSize); // Remove both operands
10294 } else {
10295 __ ret(0);
10296 }
10297}
10298
10299
Steve Block6ded16b2010-05-10 14:33:55 +010010300void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
10301 ASSERT(HasArgsInRegisters());
10302 __ pop(rcx);
10303 if (HasArgsReversed()) {
10304 __ push(rax);
10305 __ push(rdx);
10306 } else {
10307 __ push(rdx);
10308 __ push(rax);
10309 }
10310 __ push(rcx);
Leon Clarkee46be812010-01-19 14:06:41 +000010311}
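
// Stack effect of GenerateRegisterArgsPush, sketched for the non-reversed
// case (with HasArgsReversed() the rax/rdx slots swap); the return address is
// popped first so the operands can be tucked underneath it:
//
//   on entry:  rsp -> [return address][caller frame...]
//   on exit:   rsp -> [return address][rax (right)][rdx (left)][caller frame...]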
10312
10313
Steve Block6ded16b2010-05-10 14:33:55 +010010314void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
10315 Label get_result;
10316
10317 // Keep a copy of operands on the stack and make sure they are also in
10318 // rdx, rax.
10319 if (HasArgsInRegisters()) {
10320 GenerateRegisterArgsPush(masm);
10321 } else {
10322 GenerateLoadArguments(masm);
10323 }
10324
10325 // Internal frame is necessary to handle exceptions properly.
10326 __ EnterInternalFrame();
10327
10328 // Push arguments on stack if the stub expects them there.
10329 if (!HasArgsInRegisters()) {
10330 __ push(rdx);
10331 __ push(rax);
10332 }
10333 // Call the stub proper to get the result in rax.
10334 __ call(&get_result);
10335 __ LeaveInternalFrame();
10336
10337 // Left and right arguments are already on stack.
10338 __ pop(rcx);
10339 // Push the operation result. The tail call to BinaryOp_Patch will
10340 // return it to the original caller..
10341  // return it to the original caller.
10342
10343 // Push this stub's key.
10344 __ movq(rax, Immediate(MinorKey()));
10345 __ Integer32ToSmi(rax, rax);
10346 __ push(rax);
10347
10348 // Although the operation and the type info are encoded into the key,
10349 // the encoding is opaque, so push them too.
10350 __ movq(rax, Immediate(op_));
10351 __ Integer32ToSmi(rax, rax);
10352 __ push(rax);
10353
10354 __ movq(rax, Immediate(runtime_operands_type_));
10355 __ Integer32ToSmi(rax, rax);
10356 __ push(rax);
10357
10358 __ push(rcx);
10359
10360 // Perform patching to an appropriate fast case and return the result.
10361 __ TailCallExternalReference(
10362 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
10363 6,
10364 1);
10365
10366 // The entry point for the result calculation is assumed to be immediately
10367 // after this sequence.
10368 __ bind(&get_result);
10369}
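
// For reference, the six values sitting on the stack for IC::kBinaryOp_Patch
// above, from the deepest slot upwards (the final push(rcx) puts the return
// address back on top):
//
//   1. left operand                  (kept on the stack from the start)
//   2. right operand                 (kept on the stack from the start)
//   3. result computed by this stub  (rax)
//   4. this stub's MinorKey          (as a smi)
//   5. op_                           (as a smi)
//   6. runtime_operands_type_        (as a smi)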
10370
10371
10372Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
10373 GenericBinaryOpStub stub(key, type_info);
10374 return stub.GetCode();
10375}
10376
10377
10378int CompareStub::MinorKey() {
10379 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
10380 // stubs the never NaN NaN condition is only taken into account if the
10381 // condition is equals.
10382 ASSERT(static_cast<unsigned>(cc_) < (1 << 13));
10383 return ConditionField::encode(static_cast<unsigned>(cc_))
10384 | StrictField::encode(strict_)
10385 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
10386 | IncludeNumberCompareField::encode(include_number_compare_);
10387}
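
// Illustrative layout of the 16-bit key, assuming the BitField encoders are
// declared low-to-high in the order used above (the exact shifts live in the
// stub's header, so treat the bit positions as a sketch only):
//
//   bits 0..12 : cc_                      (the ASSERT above keeps it < 2^13)
//   bit  13    : strict_
//   bit  14    : never_nan_nan_           (only kept when cc_ == equal)
//   bit  15    : include_number_compare_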
10388
10389
10390// Unfortunately you have to run without snapshots to see most of these
10391// names in the profile since most compare stubs end up in the snapshot.
Leon Clarkee46be812010-01-19 14:06:41 +000010392const char* CompareStub::GetName() {
Steve Block6ded16b2010-05-10 14:33:55 +010010393 if (name_ != NULL) return name_;
10394 const int kMaxNameLength = 100;
10395 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
10396 if (name_ == NULL) return "OOM";
10397
10398 const char* cc_name;
Leon Clarkee46be812010-01-19 14:06:41 +000010399 switch (cc_) {
Steve Block6ded16b2010-05-10 14:33:55 +010010400 case less: cc_name = "LT"; break;
10401 case greater: cc_name = "GT"; break;
10402 case less_equal: cc_name = "LE"; break;
10403 case greater_equal: cc_name = "GE"; break;
10404 case equal: cc_name = "EQ"; break;
10405 case not_equal: cc_name = "NE"; break;
10406 default: cc_name = "UnknownCondition"; break;
10407 }
10408
10409 const char* strict_name = "";
10410 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
10411 strict_name = "_STRICT";
10412 }
10413
10414 const char* never_nan_nan_name = "";
10415 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
10416 never_nan_nan_name = "_NO_NAN";
10417 }
10418
10419 const char* include_number_compare_name = "";
10420 if (!include_number_compare_) {
10421 include_number_compare_name = "_NO_NUMBER";
10422 }
10423
10424 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
10425 "CompareStub_%s%s%s%s",
10426 cc_name,
10427 strict_name,
10428 never_nan_nan_name,
10429 include_number_compare_name);
10430 return name_;
10431}
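
// Example of a name produced by the format string above: a strict equality
// stub compiled with the NaN-NaN check and the number comparison both omitted
// comes out as "CompareStub_EQ_STRICT_NO_NAN_NO_NUMBER".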
10432
10433
10434void StringHelper::GenerateFastCharCodeAt(MacroAssembler* masm,
10435 Register object,
10436 Register index,
10437 Register scratch,
10438 Register result,
10439 Label* receiver_not_string,
10440 Label* index_not_smi,
10441 Label* index_out_of_range,
10442 Label* slow_case) {
10443 Label not_a_flat_string;
10444 Label try_again_with_new_string;
10445 Label ascii_string;
10446 Label got_char_code;
10447
10448 // If the receiver is a smi trigger the non-string case.
10449 __ JumpIfSmi(object, receiver_not_string);
10450
10451 // Fetch the instance type of the receiver into result register.
10452 __ movq(result, FieldOperand(object, HeapObject::kMapOffset));
10453 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
10454 // If the receiver is not a string trigger the non-string case.
10455 __ testb(result, Immediate(kIsNotStringMask));
10456 __ j(not_zero, receiver_not_string);
10457
10458 // If the index is non-smi trigger the non-smi case.
10459 __ JumpIfNotSmi(index, index_not_smi);
10460
10461 // Check for index out of range.
10462 __ SmiCompare(index, FieldOperand(object, String::kLengthOffset));
10463 __ j(above_equal, index_out_of_range);
10464
10465 __ bind(&try_again_with_new_string);
10466 // ----------- S t a t e -------------
10467 // -- object : string to access
10468 // -- result : instance type of the string
10469  // -- index   : non-negative smi index < length
10470 // -----------------------------------
10471
10472 // We need special handling for non-flat strings.
10473 ASSERT_EQ(0, kSeqStringTag);
10474 __ testb(result, Immediate(kStringRepresentationMask));
10475 __ j(not_zero, &not_a_flat_string);
10476
10477 // Put untagged index into scratch register.
10478 __ SmiToInteger32(scratch, index);
10479
10480 // Check for 1-byte or 2-byte string.
10481 ASSERT_EQ(0, kTwoByteStringTag);
10482 __ testb(result, Immediate(kStringEncodingMask));
10483 __ j(not_zero, &ascii_string);
10484
10485 // 2-byte string.
10486 // Load the 2-byte character code into the result register.
10487 __ movzxwl(result, FieldOperand(object,
10488 scratch,
10489 times_2,
10490 SeqTwoByteString::kHeaderSize));
10491 __ jmp(&got_char_code);
10492
10493 // Handle non-flat strings.
10494 __ bind(&not_a_flat_string);
10495 __ and_(result, Immediate(kStringRepresentationMask));
10496 __ cmpb(result, Immediate(kConsStringTag));
10497 __ j(not_equal, slow_case);
10498
10499 // ConsString.
10500 // Check that the right hand side is the empty string (ie if this is really a
10501  // Check that the right hand side is the empty string (i.e. this is really a
10502 // to the runtime system now, to flatten the string.
10503 __ movq(result, FieldOperand(object, ConsString::kSecondOffset));
10504 __ CompareRoot(result, Heap::kEmptyStringRootIndex);
10505 __ j(not_equal, slow_case);
10506 // Get the first of the two strings and load its instance type.
10507 __ movq(object, FieldOperand(object, ConsString::kFirstOffset));
10508 __ movq(result, FieldOperand(object, HeapObject::kMapOffset));
10509 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
10510 __ jmp(&try_again_with_new_string);
10511
10512 // ASCII string.
10513 __ bind(&ascii_string);
10514 // Load the byte into the result register.
10515 __ movzxbl(result, FieldOperand(object,
10516 scratch,
10517 times_1,
10518 SeqAsciiString::kHeaderSize));
10519 __ bind(&got_char_code);
10520 __ Integer32ToSmi(result, result);
10521}
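
// C-like sketch of the fast path emitted above; the helper names are
// illustrative rather than the real object API, and everything that is not a
// sequential string or a flattened cons string is left to the caller's
// slow_case label:
//
//   // Returns the char code, or -1 to signal "take the slow case".
//   int FastCharCodeAt(String* str, int index) {  // index already range-checked
//     for (;;) {
//       if (IsSeqString(str)) {
//         return IsAsciiEncoded(str) ? AsciiCharAt(str, index)
//                                    : TwoByteCharAt(str, index);
//       }
//       // Only cons strings whose second half is empty are handled inline.
//       if (!IsConsString(str) || !IsEmptyString(ConsSecond(str))) return -1;
//       str = ConsFirst(str);  // Retry with the flat first part.
//     }
//   }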
10522
10523
10524void StringHelper::GenerateCharFromCode(MacroAssembler* masm,
10525 Register code,
10526 Register result,
10527 Register scratch,
10528 InvokeFlag flag) {
10529 ASSERT(!code.is(result));
10530
10531 Label slow_case;
10532 Label exit;
10533
10534 // Fast case of Heap::LookupSingleCharacterStringFromCode.
10535 __ JumpIfNotSmi(code, &slow_case);
10536 __ SmiToInteger32(scratch, code);
10537 __ cmpl(scratch, Immediate(String::kMaxAsciiCharCode));
10538 __ j(above, &slow_case);
10539
10540 __ Move(result, Factory::single_character_string_cache());
10541 __ movq(result, FieldOperand(result,
10542 scratch,
10543 times_pointer_size,
10544 FixedArray::kHeaderSize));
10545
10546 __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
10547 __ j(equal, &slow_case);
10548 __ jmp(&exit);
10549
10550 __ bind(&slow_case);
10551 if (flag == CALL_FUNCTION) {
10552 __ push(code);
10553 __ CallRuntime(Runtime::kCharFromCode, 1);
10554 if (!result.is(rax)) {
10555 __ movq(result, rax);
Leon Clarkee46be812010-01-19 14:06:41 +000010556 }
Steve Block6ded16b2010-05-10 14:33:55 +010010557 } else {
10558 ASSERT(flag == JUMP_FUNCTION);
10559 ASSERT(result.is(rax));
10560 __ pop(rax); // Save return address.
10561 __ push(code);
10562 __ push(rax); // Restore return address.
10563 __ TailCallRuntime(Runtime::kCharFromCode, 1, 1);
10564 }
10565
10566 __ bind(&exit);
10567 if (flag == JUMP_FUNCTION) {
10568 ASSERT(result.is(rax));
10569 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +000010570 }
10571}
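
// Rough equivalent of the code above: smi char codes up to
// String::kMaxAsciiCharCode are served from the single_character_string_cache,
// everything else goes to Runtime::kCharFromCode (helper names below are
// illustrative):
//
//   Object* CharFromCode(Object* code) {
//     if (IsSmi(code)) {
//       int value = SmiValue(code);
//       if (value <= String::kMaxAsciiCharCode) {
//         Object* cached = single_character_string_cache[value];
//         if (cached != undefined_value) return cached;  // cache hit
//       }
//     }
//     return CallRuntimeCharFromCode(code);              // slow case
//   }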
10572
10573
10574void StringAddStub::Generate(MacroAssembler* masm) {
10575 Label string_add_runtime;
10576
10577 // Load the two arguments.
10578 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument.
10579 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument.
10580
10581 // Make sure that both arguments are strings if not known in advance.
10582 if (string_check_) {
10583 Condition is_smi;
10584 is_smi = masm->CheckSmi(rax);
10585 __ j(is_smi, &string_add_runtime);
10586 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
10587 __ j(above_equal, &string_add_runtime);
10588
10589    // First argument is a string, test second.
10590 is_smi = masm->CheckSmi(rdx);
10591 __ j(is_smi, &string_add_runtime);
10592 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
10593 __ j(above_equal, &string_add_runtime);
10594 }
10595
10596 // Both arguments are strings.
10597 // rax: first string
10598 // rdx: second string
10599  // Check if either of the strings is empty. In that case return the other.
10600 Label second_not_zero_length, both_not_zero_length;
Steve Block6ded16b2010-05-10 14:33:55 +010010601 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
10602 __ SmiTest(rcx);
Leon Clarkee46be812010-01-19 14:06:41 +000010603 __ j(not_zero, &second_not_zero_length);
10604 // Second string is empty, result is first string which is already in rax.
10605 __ IncrementCounter(&Counters::string_add_native, 1);
10606 __ ret(2 * kPointerSize);
10607 __ bind(&second_not_zero_length);
Steve Block6ded16b2010-05-10 14:33:55 +010010608 __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
10609 __ SmiTest(rbx);
Leon Clarkee46be812010-01-19 14:06:41 +000010610 __ j(not_zero, &both_not_zero_length);
10611 // First string is empty, result is second string which is in rdx.
10612 __ movq(rax, rdx);
10613 __ IncrementCounter(&Counters::string_add_native, 1);
10614 __ ret(2 * kPointerSize);
10615
10616 // Both strings are non-empty.
10617 // rax: first string
10618 // rbx: length of first string
Leon Clarked91b9f72010-01-27 17:25:45 +000010619 // rcx: length of second string
10620 // rdx: second string
Steve Block6ded16b2010-05-10 14:33:55 +010010621 // r8: map of first string if string check was performed above
10622 // r9: map of second string if string check was performed above
10623 Label string_add_flat_result, longer_than_two;
Leon Clarkee46be812010-01-19 14:06:41 +000010624 __ bind(&both_not_zero_length);
Steve Block6ded16b2010-05-10 14:33:55 +010010625
Leon Clarkee46be812010-01-19 14:06:41 +000010626  // If arguments were known to be strings, maps are not loaded into r8 and r9
10627 // by the code above.
10628 if (!string_check_) {
10629 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
10630 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
10631 }
10632 // Get the instance types of the two strings as they will be needed soon.
10633 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
10634 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010010635
10636 // Look at the length of the result of adding the two strings.
10637 ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
10638 __ SmiAdd(rbx, rbx, rcx, NULL);
10639 // Use the runtime system when adding two one character strings, as it
10640 // contains optimizations for this specific case using the symbol table.
10641 __ SmiCompare(rbx, Smi::FromInt(2));
10642 __ j(not_equal, &longer_than_two);
10643
10644 // Check that both strings are non-external ascii strings.
10645 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
10646 &string_add_runtime);
10647
10648 // Get the two characters forming the sub string.
10649 __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
10650 __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
10651
10652 // Try to lookup two character string in symbol table. If it is not found
10653 // just allocate a new one.
10654 Label make_two_character_string, make_flat_ascii_string;
10655 StringHelper::GenerateTwoCharacterSymbolTableProbe(
10656 masm, rbx, rcx, r14, r12, rdi, r15, &make_two_character_string);
10657 __ IncrementCounter(&Counters::string_add_native, 1);
10658 __ ret(2 * kPointerSize);
10659
10660 __ bind(&make_two_character_string);
10661 __ Set(rbx, 2);
10662 __ jmp(&make_flat_ascii_string);
10663
10664 __ bind(&longer_than_two);
Leon Clarkee46be812010-01-19 14:06:41 +000010665 // Check if resulting string will be flat.
Steve Block6ded16b2010-05-10 14:33:55 +010010666 __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
Leon Clarkee46be812010-01-19 14:06:41 +000010667 __ j(below, &string_add_flat_result);
10668 // Handle exceptionally long strings in the runtime system.
10669 ASSERT((String::kMaxLength & 0x80000000) == 0);
Steve Block6ded16b2010-05-10 14:33:55 +010010670 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
Leon Clarkee46be812010-01-19 14:06:41 +000010671 __ j(above, &string_add_runtime);
10672
10673 // If result is not supposed to be flat, allocate a cons string object. If
10674 // both strings are ascii the result is an ascii cons string.
10675 // rax: first string
10676  // rbx: length of resulting flat string
10677 // rdx: second string
10678 // r8: instance type of first string
10679 // r9: instance type of second string
10680 Label non_ascii, allocated;
10681 __ movl(rcx, r8);
10682 __ and_(rcx, r9);
10683 ASSERT(kStringEncodingMask == kAsciiStringTag);
10684 __ testl(rcx, Immediate(kAsciiStringTag));
10685 __ j(zero, &non_ascii);
10686  // Allocate an ascii cons string.
10687 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
10688 __ bind(&allocated);
10689 // Fill the fields of the cons string.
Steve Block6ded16b2010-05-10 14:33:55 +010010690 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
Leon Clarkee46be812010-01-19 14:06:41 +000010691 __ movl(FieldOperand(rcx, ConsString::kHashFieldOffset),
10692 Immediate(String::kEmptyHashField));
10693 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
10694 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
10695 __ movq(rax, rcx);
10696 __ IncrementCounter(&Counters::string_add_native, 1);
10697 __ ret(2 * kPointerSize);
10698 __ bind(&non_ascii);
10699 // Allocate a two byte cons string.
10700 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
10701 __ jmp(&allocated);
10702
10703 // Handle creating a flat result. First check that both strings are not
10704 // external strings.
10705 // rax: first string
Steve Block6ded16b2010-05-10 14:33:55 +010010706  // rbx: length of resulting flat string as smi
Leon Clarkee46be812010-01-19 14:06:41 +000010707 // rdx: second string
10708 // r8: instance type of first string
10709  // r9: instance type of second string
10710 __ bind(&string_add_flat_result);
Steve Block6ded16b2010-05-10 14:33:55 +010010711 __ SmiToInteger32(rbx, rbx);
Leon Clarkee46be812010-01-19 14:06:41 +000010712 __ movl(rcx, r8);
10713 __ and_(rcx, Immediate(kStringRepresentationMask));
10714 __ cmpl(rcx, Immediate(kExternalStringTag));
10715 __ j(equal, &string_add_runtime);
10716 __ movl(rcx, r9);
10717 __ and_(rcx, Immediate(kStringRepresentationMask));
10718 __ cmpl(rcx, Immediate(kExternalStringTag));
10719 __ j(equal, &string_add_runtime);
10720 // Now check if both strings are ascii strings.
10721 // rax: first string
10722  // rbx: length of resulting flat string
10723 // rdx: second string
10724 // r8: instance type of first string
10725 // r9: instance type of second string
10726 Label non_ascii_string_add_flat_result;
10727 ASSERT(kStringEncodingMask == kAsciiStringTag);
10728 __ testl(r8, Immediate(kAsciiStringTag));
10729 __ j(zero, &non_ascii_string_add_flat_result);
10730 __ testl(r9, Immediate(kAsciiStringTag));
10731 __ j(zero, &string_add_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010732
10733 __ bind(&make_flat_ascii_string);
Leon Clarkee46be812010-01-19 14:06:41 +000010734 // Both strings are ascii strings. As they are short they are both flat.
10735 __ AllocateAsciiString(rcx, rbx, rdi, r14, r15, &string_add_runtime);
10736 // rcx: result string
10737 __ movq(rbx, rcx);
10738 // Locate first character of result.
10739 __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10740 // Locate first character of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010010741 __ movq(rdi, FieldOperand(rax, String::kLengthOffset));
10742 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010743 __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10744 // rax: first char of first argument
10745 // rbx: result string
10746 // rcx: first character of result
10747 // rdx: second string
10748 // rdi: length of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010010749 StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
Leon Clarkee46be812010-01-19 14:06:41 +000010750 // Locate first character of second argument.
Steve Block6ded16b2010-05-10 14:33:55 +010010751 __ movq(rdi, FieldOperand(rdx, String::kLengthOffset));
10752 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010753 __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10754 // rbx: result string
10755 // rcx: next character of result
10756 // rdx: first char of second argument
10757 // rdi: length of second argument
Steve Block6ded16b2010-05-10 14:33:55 +010010758 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
Leon Clarkee46be812010-01-19 14:06:41 +000010759 __ movq(rax, rbx);
10760 __ IncrementCounter(&Counters::string_add_native, 1);
10761 __ ret(2 * kPointerSize);
10762
10763 // Handle creating a flat two byte result.
10764 // rax: first string - known to be two byte
10765 // rbx: length of resulting flat string
10766 // rdx: second string
10767 // r8: instance type of first string
10768  // r9: instance type of second string
10769 __ bind(&non_ascii_string_add_flat_result);
10770 __ and_(r9, Immediate(kAsciiStringTag));
10771 __ j(not_zero, &string_add_runtime);
10772 // Both strings are two byte strings. As they are short they are both
10773 // flat.
10774 __ AllocateTwoByteString(rcx, rbx, rdi, r14, r15, &string_add_runtime);
10775 // rcx: result string
10776 __ movq(rbx, rcx);
10777 // Locate first character of result.
10778 __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10779 // Locate first character of first argument.
Steve Block6ded16b2010-05-10 14:33:55 +010010780 __ movq(rdi, FieldOperand(rax, String::kLengthOffset));
10781 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010782 __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10783 // rax: first char of first argument
10784 // rbx: result string
10785 // rcx: first character of result
10786 // rdx: second argument
10787 // rdi: length of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010010788 StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
Leon Clarkee46be812010-01-19 14:06:41 +000010789 // Locate first character of second argument.
Steve Block6ded16b2010-05-10 14:33:55 +010010790 __ movq(rdi, FieldOperand(rdx, String::kLengthOffset));
10791 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010792 __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10793 // rbx: result string
10794 // rcx: next character of result
10795 // rdx: first char of second argument
10796 // rdi: length of second argument
Steve Block6ded16b2010-05-10 14:33:55 +010010797 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
Leon Clarkee46be812010-01-19 14:06:41 +000010798 __ movq(rax, rbx);
10799 __ IncrementCounter(&Counters::string_add_native, 1);
10800 __ ret(2 * kPointerSize);
10801
10802 // Just jump to runtime to add the two strings.
10803 __ bind(&string_add_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010804 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
Leon Clarkee46be812010-01-19 14:06:41 +000010805}
10806
10807
Steve Block6ded16b2010-05-10 14:33:55 +010010808void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
10809 Register dest,
10810 Register src,
10811 Register count,
10812 bool ascii) {
Leon Clarkee46be812010-01-19 14:06:41 +000010813 Label loop;
10814 __ bind(&loop);
10815 // This loop just copies one character at a time, as it is only used for very
10816 // short strings.
10817 if (ascii) {
10818 __ movb(kScratchRegister, Operand(src, 0));
10819 __ movb(Operand(dest, 0), kScratchRegister);
10820 __ addq(src, Immediate(1));
10821 __ addq(dest, Immediate(1));
10822 } else {
10823 __ movzxwl(kScratchRegister, Operand(src, 0));
10824 __ movw(Operand(dest, 0), kScratchRegister);
10825 __ addq(src, Immediate(2));
10826 __ addq(dest, Immediate(2));
10827 }
10828 __ subl(count, Immediate(1));
10829 __ j(not_zero, &loop);
10830}
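
// The loop above is equivalent to this per-character copy; the emitted loop
// runs at least once, so callers are expected to pass a non-zero count:
//
//   void CopyCharacters(uint8_t* dest, const uint8_t* src, int count, bool ascii) {
//     int step = ascii ? 1 : 2;   // one byte per ascii char, two per uc16
//     do {
//       for (int b = 0; b < step; b++) dest[b] = src[b];  // kScratchRegister's job
//       dest += step;
//       src += step;
//     } while (--count != 0);
//   }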
10831
10832
Steve Block6ded16b2010-05-10 14:33:55 +010010833void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
10834 Register dest,
10835 Register src,
10836 Register count,
10837 bool ascii) {
Leon Clarked91b9f72010-01-27 17:25:45 +000010838  // Copy characters using rep movs of quadwords. Copy any remaining bytes
10839  // (fewer than eight left over after the 8-byte chunks) one at a time after
10840  // running rep movs.
10841 ASSERT(dest.is(rdi)); // rep movs destination
10842 ASSERT(src.is(rsi)); // rep movs source
10843 ASSERT(count.is(rcx)); // rep movs count
10844
10845 // Nothing to do for zero characters.
10846 Label done;
10847 __ testq(count, count);
10848 __ j(zero, &done);
10849
10850 // Make count the number of bytes to copy.
10851 if (!ascii) {
10852 ASSERT_EQ(2, sizeof(uc16)); // NOLINT
10853 __ addq(count, count);
10854 }
10855
10856  // Don't enter the rep movs if there are fewer than 8 bytes to copy.
10857 Label last_bytes;
10858 __ testq(count, Immediate(~7));
10859 __ j(zero, &last_bytes);
10860
10861  // Copy from rsi to rdi using the rep movs instruction.
10862 __ movq(kScratchRegister, count);
10863  __ sar(count, Immediate(3)); // Number of quadwords to copy.
10864 __ repmovsq();
10865
10866 // Find number of bytes left.
10867 __ movq(count, kScratchRegister);
10868 __ and_(count, Immediate(7));
10869
10870 // Check if there are more bytes to copy.
10871 __ bind(&last_bytes);
10872 __ testq(count, count);
10873 __ j(zero, &done);
10874
10875 // Copy remaining characters.
10876 Label loop;
10877 __ bind(&loop);
10878 __ movb(kScratchRegister, Operand(src, 0));
10879 __ movb(Operand(dest, 0), kScratchRegister);
10880 __ addq(src, Immediate(1));
10881 __ addq(dest, Immediate(1));
10882 __ subq(count, Immediate(1));
10883 __ j(not_zero, &loop);
10884
10885 __ bind(&done);
10886}
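
// C-level sketch of the strategy above: convert the character count to bytes,
// move whole 8-byte chunks (the rep movsq part, memcpy standing in for it
// here), then finish the tail bytes one at a time; counts below 8 bytes skip
// the chunked copy entirely:
//
//   void CopyCharactersREP(uint8_t* dest, const uint8_t* src,
//                          size_t chars, bool ascii) {
//     size_t bytes = ascii ? chars : chars * sizeof(uint16_t);
//     size_t chunk = bytes & ~static_cast<size_t>(7);  // handled by rep movsq
//     memcpy(dest, src, chunk);
//     memcpy(dest + chunk, src + chunk, bytes & 7);    // byte-at-a-time tail
//   }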
10887
Steve Block6ded16b2010-05-10 14:33:55 +010010888void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
10889 Register c1,
10890 Register c2,
10891 Register scratch1,
10892 Register scratch2,
10893 Register scratch3,
10894 Register scratch4,
10895 Label* not_found) {
10896 // Register scratch3 is the general scratch register in this function.
10897 Register scratch = scratch3;
10898
10899  // Make sure that both characters are not digits, as such strings have a
10900 // different hash algorithm. Don't try to look for these in the symbol table.
10901 Label not_array_index;
10902 __ movq(scratch, c1);
10903 __ subq(scratch, Immediate(static_cast<int>('0')));
10904 __ cmpq(scratch, Immediate(static_cast<int>('9' - '0')));
10905 __ j(above, &not_array_index);
10906 __ movq(scratch, c2);
10907 __ subq(scratch, Immediate(static_cast<int>('0')));
10908 __ cmpq(scratch, Immediate(static_cast<int>('9' - '0')));
10909 __ j(below_equal, not_found);
10910
10911 __ bind(&not_array_index);
10912 // Calculate the two character string hash.
10913 Register hash = scratch1;
10914 GenerateHashInit(masm, hash, c1, scratch);
10915 GenerateHashAddCharacter(masm, hash, c2, scratch);
10916 GenerateHashGetHash(masm, hash, scratch);
10917
10918 // Collect the two characters in a register.
10919 Register chars = c1;
10920 __ shl(c2, Immediate(kBitsPerByte));
10921 __ orl(chars, c2);
10922
10923 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
10924 // hash: hash of two character string.
10925
10926 // Load the symbol table.
10927 Register symbol_table = c2;
10928 __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);
10929
10930 // Calculate capacity mask from the symbol table capacity.
10931 Register mask = scratch2;
10932 __ movq(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
10933 __ SmiToInteger32(mask, mask);
10934 __ decl(mask);
10935
10936 Register undefined = scratch4;
10937 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
10938
10939 // Registers
10940 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
10941 // hash: hash of two character string (32-bit int)
10942 // symbol_table: symbol table
10943 // mask: capacity mask (32-bit int)
10944 // undefined: undefined value
10945 // scratch: -
10946
10947 // Perform a number of probes in the symbol table.
10948 static const int kProbes = 4;
10949 Label found_in_symbol_table;
10950 Label next_probe[kProbes];
10951 for (int i = 0; i < kProbes; i++) {
10952 // Calculate entry in symbol table.
10953 __ movl(scratch, hash);
10954 if (i > 0) {
10955 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
10956 }
10957 __ andl(scratch, mask);
10958
10959    // Load the entry from the symbol table.
10960 Register candidate = scratch; // Scratch register contains candidate.
10961 ASSERT_EQ(1, SymbolTable::kEntrySize);
10962 __ movq(candidate,
10963 FieldOperand(symbol_table,
10964 scratch,
10965 times_pointer_size,
10966 SymbolTable::kElementsStartOffset));
10967
10968 // If entry is undefined no string with this hash can be found.
10969 __ cmpq(candidate, undefined);
10970 __ j(equal, not_found);
10971
10972 // If length is not 2 the string is not a candidate.
10973 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
10974 Smi::FromInt(2));
10975 __ j(not_equal, &next_probe[i]);
10976
10977    // We use kScratchRegister as a temporary register on the assumption that
10978    // JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly.
10979 Register temp = kScratchRegister;
10980
10981 // Check that the candidate is a non-external ascii string.
10982 __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
10983 __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
10984 __ JumpIfInstanceTypeIsNotSequentialAscii(
10985 temp, temp, &next_probe[i]);
10986
10987 // Check if the two characters match.
10988 __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
10989 __ andl(temp, Immediate(0x0000ffff));
10990 __ cmpl(chars, temp);
10991 __ j(equal, &found_in_symbol_table);
10992 __ bind(&next_probe[i]);
10993 }
10994
10995 // No matching 2 character string found by probing.
10996 __ jmp(not_found);
10997
10998 // Scratch register contains result when we fall through to here.
10999 Register result = scratch;
11000 __ bind(&found_in_symbol_table);
11001 if (!result.is(rax)) {
11002 __ movq(rax, result);
11003 }
11004}
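
// Control-flow sketch of the probing above; SymbolTable::GetProbeOffset and
// the table layout are used as-is, the other helper names are illustrative.
// Returning NULL corresponds to jumping to the caller's not_found label:
//
//   Object* LookupTwoCharSymbol(uint8_t c1, uint8_t c2) {
//     if (IsDigit(c1) && IsDigit(c2)) return NULL;  // array indices hash differently
//     uint32_t hash = TwoCharHash(c1, c2);          // the three hash helpers below
//     uint32_t mask = symbol_table->Capacity() - 1;
//     uint16_t chars = (c2 << 8) | c1;
//     for (int i = 0; i < 4; i++) {                 // kProbes == 4
//       uint32_t entry = (hash + SymbolTable::GetProbeOffset(i)) & mask;
//       Object* candidate = KeyAt(symbol_table, entry);
//       if (candidate == undefined_value) return NULL;  // definitely absent
//       if (IsSeqAsciiString(candidate) && Length(candidate) == 2 &&
//           FirstTwoChars(candidate) == chars) {
//         return candidate;                             // found it
//       }
//     }
//     return NULL;                                  // give up after four probes
//   }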
11005
11006
11007void StringHelper::GenerateHashInit(MacroAssembler* masm,
11008 Register hash,
11009 Register character,
11010 Register scratch) {
11011 // hash = character + (character << 10);
11012 __ movl(hash, character);
11013 __ shll(hash, Immediate(10));
11014 __ addl(hash, character);
11015 // hash ^= hash >> 6;
11016 __ movl(scratch, hash);
11017 __ sarl(scratch, Immediate(6));
11018 __ xorl(hash, scratch);
11019}
11020
11021
11022void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
11023 Register hash,
11024 Register character,
11025 Register scratch) {
11026 // hash += character;
11027 __ addl(hash, character);
11028 // hash += hash << 10;
11029 __ movl(scratch, hash);
11030 __ shll(scratch, Immediate(10));
11031 __ addl(hash, scratch);
11032 // hash ^= hash >> 6;
11033 __ movl(scratch, hash);
11034 __ sarl(scratch, Immediate(6));
11035 __ xorl(hash, scratch);
11036}
11037
11038
11039void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
11040 Register hash,
11041 Register scratch) {
11042 // hash += hash << 3;
11043 __ movl(scratch, hash);
11044 __ shll(scratch, Immediate(3));
11045 __ addl(hash, scratch);
11046 // hash ^= hash >> 11;
11047 __ movl(scratch, hash);
11048 __ sarl(scratch, Immediate(11));
11049 __ xorl(hash, scratch);
11050 // hash += hash << 15;
11051 __ movl(scratch, hash);
11052 __ shll(scratch, Immediate(15));
11053 __ addl(hash, scratch);
11054
11055 // if (hash == 0) hash = 27;
11056 Label hash_not_zero;
11057 __ testl(hash, hash);
11058 __ j(not_zero, &hash_not_zero);
11059 __ movl(hash, Immediate(27));
11060 __ bind(&hash_not_zero);
11061}
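
// Taken together, GenerateHashInit, GenerateHashAddCharacter and
// GenerateHashGetHash compute the hash below (the init step is just the
// add-character step applied to a zero hash). The stubs use sarl for the
// right shifts, so the shift is arithmetic; Asr mirrors that explicitly:
//
//   static inline uint32_t Asr(uint32_t v, int shift) {
//     return static_cast<uint32_t>(static_cast<int32_t>(v) >> shift);
//   }
//
//   uint32_t StringHash(const uint8_t* chars, int length) {
//     uint32_t hash = 0;
//     for (int i = 0; i < length; i++) {  // HashInit, then HashAddCharacter
//       hash += chars[i];
//       hash += hash << 10;
//       hash ^= Asr(hash, 6);
//     }
//     hash += hash << 3;                  // HashGetHash: final mixing
//     hash ^= Asr(hash, 11);
//     hash += hash << 15;
//     if (hash == 0) hash = 27;           // never hand out a zero hash
//     return hash;
//   }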
Leon Clarked91b9f72010-01-27 17:25:45 +000011062
11063void SubStringStub::Generate(MacroAssembler* masm) {
11064 Label runtime;
11065
11066 // Stack frame on entry.
11067 // rsp[0]: return address
11068 // rsp[8]: to
11069 // rsp[16]: from
11070 // rsp[24]: string
11071
11072 const int kToOffset = 1 * kPointerSize;
11073 const int kFromOffset = kToOffset + kPointerSize;
11074 const int kStringOffset = kFromOffset + kPointerSize;
11075 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;
11076
11077 // Make sure first argument is a string.
11078 __ movq(rax, Operand(rsp, kStringOffset));
11079 ASSERT_EQ(0, kSmiTag);
11080 __ testl(rax, Immediate(kSmiTagMask));
11081 __ j(zero, &runtime);
11082 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
11083 __ j(NegateCondition(is_string), &runtime);
11084
11085 // rax: string
11086 // rbx: instance type
11087 // Calculate length of sub string using the smi values.
Steve Block6ded16b2010-05-10 14:33:55 +010011088 Label result_longer_than_two;
Leon Clarked91b9f72010-01-27 17:25:45 +000011089 __ movq(rcx, Operand(rsp, kToOffset));
11090 __ movq(rdx, Operand(rsp, kFromOffset));
11091 __ JumpIfNotBothPositiveSmi(rcx, rdx, &runtime);
11092
11093 __ SmiSub(rcx, rcx, rdx, NULL); // Overflow doesn't happen.
11094 __ j(negative, &runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010011095 // Special handling of sub-strings of length 1 and 2. One character strings
11096 // are handled in the runtime system (looked up in the single character
11097  // cache). Two character strings are looked up in the symbol table.
Leon Clarked91b9f72010-01-27 17:25:45 +000011098 __ SmiToInteger32(rcx, rcx);
11099 __ cmpl(rcx, Immediate(2));
Steve Block6ded16b2010-05-10 14:33:55 +010011100 __ j(greater, &result_longer_than_two);
11101 __ j(less, &runtime);
11102
11103 // Sub string of length 2 requested.
11104 // rax: string
11105 // rbx: instance type
11106 // rcx: sub string length (value is 2)
11107 // rdx: from index (smi)
11108 __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);
11109
11110 // Get the two characters forming the sub string.
11111 __ SmiToInteger32(rdx, rdx); // From index is no longer smi.
11112 __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
11113 __ movzxbq(rcx,
11114 FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));
11115
11116 // Try to lookup two character string in symbol table.
11117 Label make_two_character_string;
11118 StringHelper::GenerateTwoCharacterSymbolTableProbe(
11119 masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
11120 __ ret(3 * kPointerSize);
11121
11122 __ bind(&make_two_character_string);
11123 // Setup registers for allocating the two character string.
11124 __ movq(rax, Operand(rsp, kStringOffset));
11125 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
11126 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
11127 __ Set(rcx, 2);
11128
11129 __ bind(&result_longer_than_two);
Leon Clarked91b9f72010-01-27 17:25:45 +000011130
11131 // rax: string
11132 // rbx: instance type
11133 // rcx: result string length
11134 // Check for flat ascii string
11135 Label non_ascii_flat;
Steve Block6ded16b2010-05-10 14:33:55 +010011136 __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);
Leon Clarked91b9f72010-01-27 17:25:45 +000011137
11138 // Allocate the result.
11139 __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);
11140
11141 // rax: result string
11142 // rcx: result string length
11143  __ movq(rdx, rsi); // rsi used by following code.
11144 // Locate first character of result.
11145 __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
11146 // Load string argument and locate character of sub string start.
11147 __ movq(rsi, Operand(rsp, kStringOffset));
11148 __ movq(rbx, Operand(rsp, kFromOffset));
11149 {
11150 SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
11151 __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
11152 SeqAsciiString::kHeaderSize - kHeapObjectTag));
11153 }
11154
11155 // rax: result string
11156 // rcx: result length
11157 // rdx: original value of rsi
11158 // rdi: first character of result
11159 // rsi: character of sub string start
Steve Block6ded16b2010-05-10 14:33:55 +010011160 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
Leon Clarked91b9f72010-01-27 17:25:45 +000011161 __ movq(rsi, rdx); // Restore rsi.
11162 __ IncrementCounter(&Counters::sub_string_native, 1);
11163 __ ret(kArgumentsSize);
11164
11165 __ bind(&non_ascii_flat);
11166 // rax: string
11167 // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
11168 // rcx: result string length
11169 // Check for sequential two byte string
11170 __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
11171 __ j(not_equal, &runtime);
11172
11173 // Allocate the result.
11174 __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);
11175
11176 // rax: result string
11177 // rcx: result string length
11178  __ movq(rdx, rsi); // rsi used by following code.
11179 // Locate first character of result.
11180 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
11181 // Load string argument and locate character of sub string start.
11182 __ movq(rsi, Operand(rsp, kStringOffset));
11183 __ movq(rbx, Operand(rsp, kFromOffset));
11184 {
11185 SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
11186 __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
11187 SeqAsciiString::kHeaderSize - kHeapObjectTag));
11188 }
11189
11190 // rax: result string
11191 // rcx: result length
11192 // rdx: original value of rsi
11193 // rdi: first character of result
11194 // rsi: character of sub string start
Steve Block6ded16b2010-05-10 14:33:55 +010011195 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
Leon Clarked91b9f72010-01-27 17:25:45 +000011196  __ movq(rsi, rdx); // Restore rsi.
11197 __ IncrementCounter(&Counters::sub_string_native, 1);
11198 __ ret(kArgumentsSize);
11199
11200 // Just jump to runtime to create the sub string.
11201 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010011202 __ TailCallRuntime(Runtime::kSubString, 3, 1);
Leon Clarked91b9f72010-01-27 17:25:45 +000011203}
11204
Leon Clarkee46be812010-01-19 14:06:41 +000011205
11206void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
11207 Register left,
11208 Register right,
11209 Register scratch1,
11210 Register scratch2,
11211 Register scratch3,
11212 Register scratch4) {
11213 // Ensure that you can always subtract a string length from a non-negative
11214 // number (e.g. another length).
11215 ASSERT(String::kMaxLength < 0x7fffffff);
11216
11217 // Find minimum length and length difference.
Steve Block6ded16b2010-05-10 14:33:55 +010011218 __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
11219 __ movq(scratch4, scratch1);
11220 __ SmiSub(scratch4,
11221 scratch4,
11222 FieldOperand(right, String::kLengthOffset),
11223 NULL);
Leon Clarkee46be812010-01-19 14:06:41 +000011224 // Register scratch4 now holds left.length - right.length.
11225 const Register length_difference = scratch4;
11226 Label left_shorter;
11227 __ j(less, &left_shorter);
11228  // The right string isn't longer than the left one.
11229 // Get the right string's length by subtracting the (non-negative) difference
11230 // from the left string's length.
Steve Block6ded16b2010-05-10 14:33:55 +010011231 __ SmiSub(scratch1, scratch1, length_difference, NULL);
Leon Clarkee46be812010-01-19 14:06:41 +000011232 __ bind(&left_shorter);
11233 // Register scratch1 now holds Min(left.length, right.length).
11234 const Register min_length = scratch1;
11235
11236 Label compare_lengths;
11237 // If min-length is zero, go directly to comparing lengths.
Steve Block6ded16b2010-05-10 14:33:55 +010011238 __ SmiTest(min_length);
Leon Clarkee46be812010-01-19 14:06:41 +000011239 __ j(zero, &compare_lengths);
11240
Steve Block6ded16b2010-05-10 14:33:55 +010011241 __ SmiToInteger32(min_length, min_length);
11242
Leon Clarkee46be812010-01-19 14:06:41 +000011243 // Registers scratch2 and scratch3 are free.
11244 Label result_not_equal;
11245 Label loop;
11246 {
11247 // Check characters 0 .. min_length - 1 in a loop.
11248 // Use scratch3 as loop index, min_length as limit and scratch2
11249 // for computation.
11250 const Register index = scratch3;
11251 __ movl(index, Immediate(0)); // Index into strings.
11252 __ bind(&loop);
11253 // Compare characters.
11254 // TODO(lrn): Could we load more than one character at a time?
11255 __ movb(scratch2, FieldOperand(left,
11256 index,
11257 times_1,
11258 SeqAsciiString::kHeaderSize));
11259 // Increment index and use -1 modifier on next load to give
11260 // the previous load extra time to complete.
11261 __ addl(index, Immediate(1));
11262 __ cmpb(scratch2, FieldOperand(right,
11263 index,
11264 times_1,
11265 SeqAsciiString::kHeaderSize - 1));
11266 __ j(not_equal, &result_not_equal);
11267 __ cmpl(index, min_length);
11268 __ j(not_equal, &loop);
11269 }
11270 // Completed loop without finding different characters.
11271 // Compare lengths (precomputed).
11272 __ bind(&compare_lengths);
Steve Block6ded16b2010-05-10 14:33:55 +010011273 __ SmiTest(length_difference);
Leon Clarkee46be812010-01-19 14:06:41 +000011274 __ j(not_zero, &result_not_equal);
11275
11276 // Result is EQUAL.
11277 __ Move(rax, Smi::FromInt(EQUAL));
Leon Clarkee46be812010-01-19 14:06:41 +000011278 __ ret(2 * kPointerSize);
11279
11280 Label result_greater;
11281 __ bind(&result_not_equal);
11282 // Unequal comparison of left to right, either character or length.
11283 __ j(greater, &result_greater);
11284
11285 // Result is LESS.
11286 __ Move(rax, Smi::FromInt(LESS));
Leon Clarkee46be812010-01-19 14:06:41 +000011287 __ ret(2 * kPointerSize);
11288
11289 // Result is GREATER.
11290 __ bind(&result_greater);
11291 __ Move(rax, Smi::FromInt(GREATER));
Leon Clarkee46be812010-01-19 14:06:41 +000011292 __ ret(2 * kPointerSize);
11293}
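
// What the code above computes, in C terms (both inputs are flat ascii
// strings; the stub returns the result as a smi and pops its two arguments):
//
//   int CompareFlatAsciiStrings(const uint8_t* left, int left_len,
//                               const uint8_t* right, int right_len) {
//     int min_length = left_len < right_len ? left_len : right_len;
//     for (int i = 0; i < min_length; i++) {
//       if (left[i] != right[i]) return left[i] < right[i] ? LESS : GREATER;
//     }
//     if (left_len == right_len) return EQUAL;
//     return left_len < right_len ? LESS : GREATER;
//   }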


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]: return address
  //  rsp[8]: right string
  //  rsp[16]: left string

  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // left
  __ movq(rax, Operand(rsp, 1 * kPointerSize));  // right

  // Check for identity.
  Label not_same;
  __ cmpq(rdx, rax);
  __ j(not_equal, &not_same);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of ASCII strings.
  __ IncrementCounter(&Counters::string_compare_native, 1);
  GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater),
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}
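
// Illustrative examples (not emitted code): for flat ASCII inputs the stub
// returns, in rax,
//
//   compare("abc", "abc")  -> Smi::FromInt(EQUAL)
//   compare("abc", "abd")  -> Smi::FromInt(LESS)
//   compare("abcd", "abc") -> Smi::FromInt(GREATER)
//
// where "compare" is shorthand for calling the stub with the left and right
// strings on the stack as described above. Anything that is not a pair of
// sequential ASCII strings falls through to Runtime::kStringCompare, which
// produces the same three tagged results.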

#undef __

#define __ masm.

#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
// Define a custom fmod implementation.
ModuloFunction CreateModuloFunction() {
  size_t actual_size;
  byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
                                                 &actual_size,
                                                 true));
  CHECK(buffer);
  Assembler masm(buffer, static_cast<int>(actual_size));
  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript NaN object).

  // The Windows 64 ABI passes double arguments in xmm0 and xmm1 and
  // returns the result in xmm0.
  // Argument backing space is allocated on the stack above
  // the return address.
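  // (On Win64 the caller reserves 32 bytes of "home space" for the register
  // arguments immediately above the return address; the stores below reuse
  // the first two of those slots as scratch memory.)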

  // Compute x mod y.
  // Load y and x (use argument backing store as temporary storage).
  __ movsd(Operand(rsp, kPointerSize * 2), xmm1);
  __ movsd(Operand(rsp, kPointerSize), xmm0);
  __ fld_d(Operand(rsp, kPointerSize * 2));
  __ fld_d(Operand(rsp, kPointerSize));
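  // At this point the x87 stack holds the operands for fprem below:
  // st(0) = x (loaded last) and st(1) = y.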

  // Clear exception flags before the operation.
  {
    Label no_exceptions;
    __ fwait();
    __ fnstsw_ax();
    // Clear if the Invalid Operation or Zero Divide exceptions are set.
    __ testb(rax, Immediate(5));
    __ j(zero, &no_exceptions);
    __ fnclex();
    __ bind(&no_exceptions);
  }
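
  // In the x87 status word, bit 0 is the Invalid Operation flag and bit 2 is
  // the Zero Divide flag, so the mask of 5 above (and in the result check
  // further down) tests exactly those two sticky exception bits.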

  // Compute st(0) % st(1).
  {
    Label partial_remainder_loop;
    __ bind(&partial_remainder_loop);
    __ fprem();
    __ fwait();
    __ fnstsw_ax();
    __ testl(rax, Immediate(0x400 /* C2 */));
    // If C2 is set, the computation only has a partial result. Loop to
    // continue the computation.
    __ j(not_zero, &partial_remainder_loop);
  }
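  // fprem delivers only a partial remainder when the exponents of st(0) and
  // st(1) differ by 64 or more; each pass reduces that difference by at most
  // 63, so operands of very different magnitude may take several iterations
  // before C2 clears and st(0) holds the final remainder.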

  Label valid_result;
  Label return_result;
  // If the Invalid Operation or Zero Divide exceptions are set,
  // return NaN.
  __ testb(rax, Immediate(5));
  __ j(zero, &valid_result);
  __ fstp(0);  // Drop the result in st(0).
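  // 0x7ff8000000000000 is the canonical quiet-NaN bit pattern for an IEEE-754
  // double: all exponent bits set plus the most significant mantissa bit.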
  int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000);
  __ movq(rcx, kNaNValue, RelocInfo::NONE);
  __ movq(Operand(rsp, kPointerSize), rcx);
  __ movsd(xmm0, Operand(rsp, kPointerSize));
  __ jmp(&return_result);

  // If the result is valid, return it.
  __ bind(&valid_result);
  __ fstp_d(Operand(rsp, kPointerSize));
  __ movsd(xmm0, Operand(rsp, kPointerSize));

  // Clean up the FPU stack and exceptions and return xmm0.
  __ bind(&return_result);
  __ fstp(0);  // Unload y.

  Label clear_exceptions;
  __ testb(rax, Immediate(0x3f /* Any Exception */));
  __ j(not_zero, &clear_exceptions);
  __ ret(0);
  __ bind(&clear_exceptions);
  __ fnclex();
  __ ret(0);

  CodeDesc desc;
  masm.GetCode(&desc);
  // The generated code is called from C++ through this function pointer.
  return FUNCTION_CAST<ModuloFunction>(buffer);
}
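
// Minimal usage sketch (illustrative only; the variable names below are
// hypothetical): the returned value is an ordinary function pointer, so the
// runtime can call the generated code directly, e.g.
//
//   ModuloFunction do_modulo = CreateModuloFunction();
//   double remainder = do_modulo(5.5, 2.0);  // 1.5, same as fmod(5.5, 2.0).
//
// V8 wires the function up elsewhere; this only shows the calling convention
// of the generated stub.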

#endif


#undef __

} } // namespace v8::internal