// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ movq(Operand(rbp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void DeferredCode::RestoreRegisters() {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ movq(RegisterAllocator::ToRegister(i), Operand(rbp, action));
    }
  }
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


// -------------------------------------------------------------------------
// Deferred code objects
//
// These subclasses of DeferredCode add pieces of code to the end of generated
// code. They are branched to from the generated code, and
// keep some slower code out of the main body of the generated code.
// Many of them call a code stub or a runtime function.

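// The slow case of an inlined smi addition of a constant: the speculative
// add either overflowed or the operand was not smi tagged.  The appropriate
// add stub is called and the result is left in dst.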
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


// The result of value + src is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};

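// The slow case of an inlined smi subtraction of a constant: the speculative
// subtraction either overflowed or the operand was not smi tagged.  The
// appropriate subtract stub is called and the result is left in dst.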
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


// Call the appropriate binary operation stub to compute value op src
// and leave the result in dst.
class DeferredInlineSmiOperationReversed: public DeferredCode {
 public:
  DeferredInlineSmiOperationReversed(Token::Value op,
                                     Register dst,
                                     Smi* value,
                                     Register src,
                                     OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        value_(value),
        src_(src),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperationReversed");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Smi* value_;
  Register src_;
  OverwriteMode overwrite_mode_;
};

class FloatingPointHelper : public AllStatic {
 public:
  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand on TOS+1. Returns operand as floating point number on FPU
  // stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register scratch);

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in src register. Returns operand as floating point number
  // in XMM register. May destroy src register.
  static void LoadFloatOperand(MacroAssembler* masm,
                               Register src,
                               XMMRegister dst);

  // Code pattern for loading a possible number into a XMM register.
  // If the contents of src is not a number, control branches to
  // the Label not_number. If contents of src is a smi or a heap number
  // object (fp value), it is loaded into the XMM register as a double.
  // The register src is not changed, and src may not be kScratchRegister.
  static void LoadFloatOperand(MacroAssembler* masm,
                               Register src,
                               XMMRegister dst,
                               Label* not_number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 in rdx, operand_2 in rax; Returns operands as
  // floating point numbers in XMM registers.
  static void LoadFloatOperands(MacroAssembler* masm,
                                XMMRegister dst1,
                                XMMRegister dst2);

  // Similar to LoadFloatOperands, assumes that the operands are smis.
  static void LoadFloatOperandsFromSmis(MacroAssembler* masm,
                                        XMMRegister dst1,
                                        XMMRegister dst2);

  // Code pattern for loading floating point values onto the fp stack.
  // Input values must be either smi or heap number objects (fp values).
  // Requirements:
  // Register version: operands in registers lhs and rhs.
  // Stack version: operands on TOS+1 and TOS+2.
  // Returns operands as floating point numbers on fp stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register lhs,
                                Register rhs);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in rax, operand_2 in rdx; falls through on float or smi
  // operands, jumps to the non_float label otherwise.
  static void CheckNumberOperands(MacroAssembler* masm,
                                  Label* non_float);

  // Takes the operands in rdx and rax and loads them as integers in rax
  // and rcx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             Label* operand_conversion_failure);
};


// -----------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}

void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals. The inevitable call
  // will sync frame elements to memory anyway, so we do it eagerly to
  // allow us to push the arguments directly into place.
  frame_->SyncRange(0, frame_->element_count() - 1);

  __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT);
  frame_->EmitPush(rsi);  // The context is the first argument.
  frame_->EmitPush(kScratchRegister);
  frame_->EmitPush(Smi::FromInt(is_eval() ? 1 : 0));
  Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ += info->loop_nesting();

  JumpTarget::set_compiling_deferred_code(false);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    frame_->SpillAll();
    __ int3();
  }
#endif

  // New scope to get automatic timing calculation.
  { HistogramTimerScope codegen_timer(&Counters::code_generation);
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // rbp: caller's frame pointer
    // rsp: stack pointer
    // rdi: called JS function
    // rsi: callee's context
    allocator_->Initialize();

    if (info->mode() == CompilationInfo::PRIMARY) {
      frame_->Enter();

      // Allocate space for locals and initialize them.
      frame_->AllocateStackSlots();

      // Allocate the local context if needed.
      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
      if (heap_slots > 0) {
        Comment cmnt(masm_, "[ allocate local context");
        // Allocate local context.
        // Get outer context and create a new context based on it.
        frame_->PushFunction();
        Result context;
        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
          FastNewContextStub stub(heap_slots);
          context = frame_->CallStub(&stub, 1);
        } else {
          context = frame_->CallRuntime(Runtime::kNewContext, 1);
        }

        // Update context local.
        frame_->SaveContextRegister();

        // Verify that the runtime call result and rsi agree.
        if (FLAG_debug_code) {
          __ cmpq(context.reg(), rsi);
          __ Assert(equal, "Runtime::NewContext should end up in rsi");
        }
      }

      // TODO(1241774): Improve this code:
      // 1) only needed if we have a context
      // 2) no need to recompute context ptr every single time
      // 3) don't copy parameter operand code from SlotOperand!
      {
        Comment cmnt2(masm_, "[ copy context parameters into .context");
        // Note that iteration order is relevant here! If we have the same
        // parameter twice (e.g., function (x, y, x)), and that parameter
        // needs to be copied into the context, it must be the last argument
        // passed to the parameter that needs to be copied. This is a rare
        // case so we don't check for it, instead we rely on the copying
        // order: such a parameter is copied repeatedly into the same
        // context location and thus the last value is what is seen inside
        // the function.
        for (int i = 0; i < scope()->num_parameters(); i++) {
          Variable* par = scope()->parameter(i);
          Slot* slot = par->slot();
          if (slot != NULL && slot->type() == Slot::CONTEXT) {
            // The use of SlotOperand below is safe in unspilled code
            // because the slot is guaranteed to be a context slot.
            //
            // There are no parameters in the global scope.
            ASSERT(!scope()->is_global_scope());
            frame_->PushParameterAt(i);
            Result value = frame_->Pop();
            value.ToRegister();

            // SlotOperand loads context.reg() with the context object
            // stored to, used below in RecordWrite.
            Result context = allocator_->Allocate();
            ASSERT(context.is_valid());
            __ movq(SlotOperand(slot, context.reg()), value.reg());
            int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
            Result scratch = allocator_->Allocate();
            ASSERT(scratch.is_valid());
            frame_->Spill(context.reg());
            frame_->Spill(value.reg());
            __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
          }
        }
      }

      // Store the arguments object. This must happen after context
      // initialization because the arguments object may be stored in
      // the context.
      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
        StoreArgumentsObject(true);
      }

      // Initialize ThisFunction reference if present.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        frame_->Push(Factory::the_hole_value());
        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
      }
    } else {
      // When used as the secondary compiler for splitting, rbp, rsi,
      // and rdi have been pushed on the stack. Adjust the virtual
      // frame to match this state.
      frame_->Adjust(3);
      allocator_->Unuse(rdi);

      // Bind all the bailout labels to the beginning of the function.
      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
      for (int i = 0; i < bailouts->length(); i++) {
        __ bind(bailouts->at(i)->label());
      }
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->loop_nesting());
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}

void CodeGenerator::GenerateReturnSequence(Result* return_value) {
  // The return value is a live (but not currently reference counted)
  // reference to rax. This is safe because the current frame does not
  // contain a reference to rax (it is prepared for the return by spilling
  // all registers).
  if (FLAG_trace) {
    frame_->Push(return_value);
    *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
  }
  return_value->ToRegister(rax);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);
#endif

  // Leave the frame and return popping the arguments and the
  // receiver.
  frame_->Exit();
  masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Add padding that will be overwritten by a debugger breakpoint.
  // frame_->Exit() generates "movq rsp, rbp; pop rbp; ret k"
  // with length 7 (3 + 1 + 3).
  const int kPadding = Assembler::kJSReturnSequenceLength - 7;
  for (int i = 0; i < kPadding; ++i) {
    masm_->int3();
  }
  // Check that the size of the code used for returning matches what is
  // expected by the debugger.
  ASSERT_EQ(Assembler::kJSReturnSequenceLength,
            masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  DeleteFrame();
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0))
      && (allocator()->count(rbx) == (frame()->is_used(rbx) ? 1 : 0))
      && (allocator()->count(rcx) == (frame()->is_used(rcx) ? 1 : 0))
      && (allocator()->count(rdx) == (frame()->is_used(rdx) ? 1 : 0))
      && (allocator()->count(rdi) == (frame()->is_used(rdi) ? 1 : 0))
      && (allocator()->count(r8) == (frame()->is_used(r8) ? 1 : 0))
      && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0))
      && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0))
      && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0))
      && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0))
      && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0));
}
#endif

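// Deferred code for the slow case of an inlined keyed property load.  It
// calls the KeyedLoadIC stub and leaves the result in dst.  The testl
// instruction emitted after the call marks the patch site used by the IC.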
class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetKeyedValue(Register dst,
                                          Register receiver,
                                          Register key,
                                          bool is_global)
      : dst_(dst), receiver_(receiver), key_(key), is_global_(is_global) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
  bool is_global_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  __ push(receiver_);  // First IC argument.
  __ push(key_);       // Second IC argument.

  // Calculate the delta from the IC call instruction to the map check
  // movq instruction in the inlined version. This delta is stored in
  // a test(rax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the movq instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  RelocInfo::Mode mode = is_global_
      ? RelocInfo::CODE_TARGET_CONTEXT
      : RelocInfo::CODE_TARGET;
  __ Call(ic, mode);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  // TODO(X64): Consider whether it's worth switching the test to a
  // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
  // be generated normally.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);

  if (!dst_.is(rax)) __ movq(dst_, rax);
  __ pop(key_);
  __ pop(receiver_);
}

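// Deferred code for the slow case of an inlined keyed property store.  It
// calls the KeyedStoreIC stub with the value in rax and restores the value,
// key and receiver registers afterwards.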
class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver)
      : value_(value), key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Label patch_site_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
  // Push receiver and key arguments on the stack.
  __ push(receiver_);
  __ push(key_);
  // Move value argument to rax as expected by the IC stub.
  if (!value_.is(rax)) __ movq(rax, value_);
  // Call the IC stub.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instructions (initial movq)
  // to the test instruction. We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
  // Restore value (returned from store IC), key and receiver
  // registers.
  if (!value_.is(rax)) __ movq(value_, rax);
  __ pop(key_);
  __ pop(receiver_);
}


void CodeGenerator::CallApplyLazy(Expression* applicand,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).
  // If the arguments object of the scope has not been allocated,
  // and x.apply is Function.prototype.apply, this optimization
  // just copies y and the arguments of the current function on the
  // stack, as receiver and arguments, and calls x.
  // In the implementation comments, we call x the applicand
  // and y the receiver.
  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  // Load applicand.apply onto the stack. This will usually
  // give us a megamorphic load site. Not super, but it works.
  Load(applicand);
  Handle<String> name = Factory::LookupAsciiSymbol("apply");
  frame()->Push(name);
  Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
  __ nop();
  frame()->Push(&answer);

  // Load the receiver and the existing arguments object onto the
  // expression stack. Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);
  // Contents of frame at this point:
  // Frame[0]: arguments object of the current function or the hole.
  // Frame[1]: receiver
  // Frame[2]: applicand.apply
  // Frame[3]: applicand.

  // Check if the arguments object has been lazily allocated
  // already. If so, just use that instead of copying the arguments
  // from the stack. This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  frame_->Dup();
  Result probe = frame_->Pop();
  { VirtualFrame::SpilledScope spilled_scope;
    Label slow, done;
    bool try_lazy = true;
    if (probe.is_constant()) {
      try_lazy = probe.handle()->IsTheHole();
    } else {
      __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
      probe.Unuse();
      __ j(not_equal, &slow);
    }

    if (try_lazy) {
      Label build_args;
      // Get rid of the arguments object probe.
      frame_->Drop();  // Can be called on a spilled frame.
      // Stack now has 3 elements on it.
      // Contents of stack at this point:
      // rsp[0]: receiver
      // rsp[1]: applicand.apply
      // rsp[2]: applicand.

      // Check that the receiver really is a JavaScript object.
      __ movq(rax, Operand(rsp, 0));
      Condition is_smi = masm_->CheckSmi(rax);
      __ j(is_smi, &build_args);
      // We allow all JSObjects including JSFunctions. As long as
      // JS_FUNCTION_TYPE is the last instance type and it is right
      // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
      // bound.
      ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
      ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
      __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
      __ j(below, &build_args);

      // Check that applicand.apply is Function.prototype.apply.
      __ movq(rax, Operand(rsp, kPointerSize));
      is_smi = masm_->CheckSmi(rax);
      __ j(is_smi, &build_args);
      __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx);
      __ j(not_equal, &build_args);
      __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
      Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
      __ Cmp(FieldOperand(rax, SharedFunctionInfo::kCodeOffset), apply_code);
      __ j(not_equal, &build_args);

      // Check that applicand is a function.
      __ movq(rdi, Operand(rsp, 2 * kPointerSize));
      is_smi = masm_->CheckSmi(rdi);
      __ j(is_smi, &build_args);
      __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
      __ j(not_equal, &build_args);

      // Copy the arguments to this function possibly from the
      // adaptor frame below it.
      Label invoke, adapted;
      __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
      __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
                    Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
      __ j(equal, &adapted);

      // No arguments adaptor frame. Copy fixed number of arguments.
      __ movq(rax, Immediate(scope()->num_parameters()));
      for (int i = 0; i < scope()->num_parameters(); i++) {
        __ push(frame_->ParameterAt(i));
      }
      __ jmp(&invoke);

      // Arguments adaptor frame present. Copy arguments from there, but
      // avoid copying too many arguments to avoid stack overflows.
      __ bind(&adapted);
      static const uint32_t kArgumentsLimit = 1 * KB;
      __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
      __ SmiToInteger32(rax, rax);
      __ movq(rcx, rax);
      __ cmpq(rax, Immediate(kArgumentsLimit));
      __ j(above, &build_args);

      // Loop through the arguments pushing them onto the execution
      // stack. We don't inform the virtual frame of the push, so we don't
      // have to worry about getting rid of the elements from the virtual
      // frame.
      Label loop;
      // rcx is a small non-negative integer, due to the test above.
      __ testl(rcx, rcx);
      __ j(zero, &invoke);
      __ bind(&loop);
      __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize));
      __ decl(rcx);
      __ j(not_zero, &loop);

      // Invoke the function.
      __ bind(&invoke);
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION);
      // Drop applicand.apply and applicand from the stack, and push
      // the result of the function call, but leave the spilled frame
      // unchanged, with 3 elements, so it is correct when we compile the
      // slow-case code.
      __ addq(rsp, Immediate(2 * kPointerSize));
      __ push(rax);
      // Stack now has 1 element:
      // rsp[0]: result
      __ jmp(&done);

      // Slow-case: Allocate the arguments object since we know it isn't
      // there, and fall-through to the slow-case where we call
      // applicand.apply.
      __ bind(&build_args);
      // Stack now has 3 elements, because we have jumped from where:
      // rsp[0]: receiver
      // rsp[1]: applicand.apply
      // rsp[2]: applicand.

      // StoreArgumentsObject requires a correct frame, and may modify it.
      Result arguments_object = StoreArgumentsObject(false);
      frame_->SpillAll();
      arguments_object.ToRegister();
      frame_->EmitPush(arguments_object.reg());
      arguments_object.Unuse();
      // Stack and frame now have 4 elements.
      __ bind(&slow);
    }

    // Generic computation of x.apply(y, args) with no special optimization.
    // Flip applicand.apply and applicand on the stack, so
    // applicand looks like the receiver of the applicand.apply call.
    // Then process it as a normal function call.
    __ movq(rax, Operand(rsp, 3 * kPointerSize));
    __ movq(rbx, Operand(rsp, 2 * kPointerSize));
    __ movq(Operand(rsp, 2 * kPointerSize), rax);
    __ movq(Operand(rsp, 3 * kPointerSize), rbx);

    CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
    Result res = frame_->CallStub(&call_function, 3);
    // The function and its two arguments have been dropped.
    frame_->Drop(1);  // Drop the receiver as well.
    res.ToRegister();
    frame_->EmitPush(res.reg());
    // Stack now has 1 element:
    // rsp[0]: result
    if (try_lazy) __ bind(&done);
  }  // End of spilled scope.
  // Restore the context register after a call.
  frame_->RestoreContextRegister();
}

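// Deferred call to the stack check stub, used by CheckStack() below when
// rsp is below the stack limit.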
class DeferredStackCheck: public DeferredCode {
 public:
  DeferredStackCheck() {
    set_comment("[ DeferredStackCheck");
  }

  virtual void Generate();
};


void DeferredStackCheck::Generate() {
  StackCheckStub stub;
  __ CallStub(&stub);
}


void CodeGenerator::CheckStack() {
  DeferredStackCheck* deferred = new DeferredStackCheck;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  deferred->Branch(below);
  deferred->BindExit();
}


void CodeGenerator::VisitAndSpill(Statement* statement) {
  // TODO(X64): No architecture specific code. Move to shared location.
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Visit(statement);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  VisitStatements(statements);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
  ASSERT(!in_spilled_code());
  for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
    Visit(statements->at(i));
  }
}


void CodeGenerator::VisitBlock(Block* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  VisitStatements(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->slot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call. Sync the virtual frame eagerly
    // so we can simply push the arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(rsi);
    __ movq(kScratchRegister, var->name(), RelocInfo::EMBEDDED_OBJECT);
    frame_->EmitPush(kScratchRegister);
    // Declaration nodes are always introduced in one of two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Smi::FromInt(attr));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPush(Heap::kTheHoleValueRootIndex);
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      frame_->EmitPush(Smi::FromInt(0));  // no initial value!
    }
    Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    {
      // Set the initial value.
      Reference target(this, node->proxy());
      Load(val);
      target.SetValue(NOT_CONST_INIT);
      // The reference is removed from the stack (preserving TOS) when
      // it goes out of scope.
    }
    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  // Remove the lingering expression result from the top of stack.
  frame_->Drop();
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);
  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    JumpTarget then;
    JumpTarget else_;
    ControlDestination dest(&then, &else_, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The else target was bound, so we compile the else part first.
      Visit(node->else_statement());

      // We may have dangling jumps to the then part.
      if (then.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then target was bound, so we compile the then part first.
      Visit(node->then_statement());

      if (else_.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    }

  } else if (has_then_stm) {
    ASSERT(!has_else_stm);
    JumpTarget then;
    ControlDestination dest(&then, &exit, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The exit label was bound. We may have dangling jumps to the
      // then part.
      if (then.is_linked()) {
        exit.Unuse();
        exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then label was bound.
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    ASSERT(!has_then_stm);
    JumpTarget else_;
    ControlDestination dest(&exit, &else_, false);
    LoadCondition(node->condition(), &dest, true);

    if (dest.true_was_fall_through()) {
      // The exit label was bound. We may have dangling jumps to the
      // else part.
      if (else_.is_linked()) {
        exit.Unuse();
        exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    } else {
      // The else label was bound.
      Visit(node->else_statement());
    }

  } else {
    ASSERT(!has_then_stm && !has_else_stm);
    // We only care about the condition's side effects (not its value
    // or control flow effect). LoadCondition is called without
    // forcing control flow.
    ControlDestination dest(&exit, &exit, true);
    LoadCondition(node->condition(), &dest, false);
    if (!dest.is_used()) {
      // We got a value on the frame rather than (or in addition to)
      // control flow.
      frame_->Drop();
    }
  }

  if (exit.is_linked()) {
    exit.Bind();
  }
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ReturnStatement");

  CodeForStatementPosition(node);
  Load(node->expression());
  Result return_value = frame_->Pop();
  if (function_return_is_shadowed_) {
    function_return_.Jump(&return_value);
  } else {
    frame_->PrepareForReturn();
    if (function_return_.is_bound()) {
      // If the function return label is already bound we reuse the
      // code by jumping to the return site.
      function_return_.Jump(&return_value);
    } else {
      function_return_.Bind(&return_value);
      GenerateReturnSequence(&return_value);
    }
  }
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  Result context;
  if (node->is_catch_block()) {
    context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    context = frame_->CallRuntime(Runtime::kPushContext, 1);
  }

  // Update context local.
  frame_->SaveContextRegister();

  // Verify that the runtime call result and rsi agree.
  if (FLAG_debug_code) {
    __ cmpq(context.reg(), rsi);
    __ Assert(equal, "Runtime::NewContext should end up in rsi");
  }
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ movq(rsi, ContextOperand(rsi, Context::PREVIOUS_INDEX));
  // Update context local.
  frame_->SaveContextRegister();
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
  // TODO(X64): This code is completely generic and should be moved somewhere
  // where it can be shared between architectures.
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);

  // Compile the switch value.
  Load(node->tag());

  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;

  JumpTarget next_test;
  // Compile the case label expressions and comparisons. Exit early
  // if a comparison is unconditionally true. The target next_test is
  // bound before the loop in order to indicate control flow to the
  // first comparison.
  next_test.Bind();
  for (int i = 0; i < length && !next_test.is_unused(); i++) {
    CaseClause* clause = cases->at(i);
    // The default is not a test, but remember it for later.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    // We recycle the same target next_test for each test. Bind it if
    // the previous test has not done so and then unuse it for the
    // loop.
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    next_test.Unuse();

    // Duplicate the switch value.
    frame_->Dup();

    // Compile the label expression.
    Load(clause->label());

    // Compare and branch to the body if true or the next test if
    // false. Prefer the next test as a fall through.
    ControlDestination dest(clause->body_target(), &next_test, false);
    Comparison(node, equal, true, &dest);

    // If the comparison fell through to the true target, jump to the
    // actual body.
    if (dest.true_was_fall_through()) {
      clause->body_target()->Unuse();
      clause->body_target()->Jump();
    }
  }

  // If there was control flow to a next test from the last one
  // compiled, compile a jump to the default or break target.
  if (!next_test.is_unused()) {
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    // Drop the switch value.
    frame_->Drop();
    if (default_clause != NULL) {
      default_clause->body_target()->Jump();
    } else {
      node->break_target()->Jump();
    }
  }

  // The last instruction emitted was a jump, either to the default
  // clause or the break target, or else to a case body from the loop
  // that compiles the tests.
  ASSERT(!has_valid_frame());
  // Compile case bodies as needed.
  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);

    // There are two ways to reach the body: from the corresponding
    // test or as the fall through of the previous body.
    if (clause->body_target()->is_linked() || has_valid_frame()) {
      if (clause->body_target()->is_linked()) {
        if (has_valid_frame()) {
          // If we have both a jump to the test and a fall through, put
          // a jump on the fall through path to avoid the dropping of
          // the switch value on the test path. The exception is the
          // default which has already had the switch value dropped.
          if (clause->is_default()) {
            clause->body_target()->Bind();
          } else {
            JumpTarget body;
            body.Jump();
            clause->body_target()->Bind();
            frame_->Drop();
            body.Bind();
          }
        } else {
          // No fall through to worry about.
          clause->body_target()->Bind();
          if (!clause->is_default()) {
            frame_->Drop();
          }
        }
      } else {
        // Otherwise, we have only fall through.
        ASSERT(has_valid_frame());
      }

      // We are now prepared to compile the body.
      Comment cmnt(masm_, "[ Case body");
      VisitStatements(clause->statements());
    }
    clause->body_target()->Unuse();
  }

  // We may not have a valid frame here so bind the break target only
  // if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  ConditionAnalysis info = AnalyzeCondition(node->cond());
  // Label the top of the loop for the backward jump if necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // Use the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      // No need to label it.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      break;
    case DONT_KNOW:
      // Continue is the test, so use the backward body target.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      body.Bind();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control flow can fall off the end of the body, jump back
      // to the top and bind the break target at the exit.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case ALWAYS_FALSE:
      // We may have had continues or breaks in the body.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
1400 node->continue_target()->Bind();
1401 }
1402 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00001403 Comment cmnt(masm_, "[ DoWhileCondition");
1404 CodeForDoWhileConditionPosition(node);
Steve Block3ce2e202009-11-05 08:53:23 +00001405 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00001406 LoadCondition(node->cond(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00001407 }
Steve Block3ce2e202009-11-05 08:53:23 +00001408 if (node->break_target()->is_linked()) {
1409 node->break_target()->Bind();
1410 }
1411 break;
1412 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001413
Steve Block3ce2e202009-11-05 08:53:23 +00001414 DecrementLoopNesting();
1415 node->continue_target()->Unuse();
1416 node->break_target()->Unuse();
1417}
Steve Blocka7e24c12009-10-30 11:49:00 +00001418
Steve Block3ce2e202009-11-05 08:53:23 +00001419
1420void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
1421 ASSERT(!in_spilled_code());
1422 Comment cmnt(masm_, "[ WhileStatement");
1423 CodeForStatementPosition(node);
1424
1425 // If the condition is always false and has no side effects, we do not
1426 // need to compile anything.
1427 ConditionAnalysis info = AnalyzeCondition(node->cond());
1428 if (info == ALWAYS_FALSE) return;
1429
1430 // Do not duplicate conditions that may have function literal
1431 // subexpressions. This can cause us to compile the function literal
1432 // twice.
1433 bool test_at_bottom = !node->may_have_function_literal();
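  // Illustrative example (added, not part of the original source): in
  //   while (pred(function() { return x; })) { ... }
  // the condition contains a function literal, so it is compiled only once at
  // the top of the loop; a condition like `i < n` can safely be duplicated
  // and re-tested at the bottom.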
1434 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1435 IncrementLoopNesting();
1436 JumpTarget body;
1437 if (test_at_bottom) {
1438 body.set_direction(JumpTarget::BIDIRECTIONAL);
1439 }
1440
1441 // Based on the condition analysis, compile the test as necessary.
1442 switch (info) {
1443 case ALWAYS_TRUE:
1444 // We will not compile the test expression. Label the top of the
1445 // loop with the continue target.
1446 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1447 node->continue_target()->Bind();
1448 break;
1449 case DONT_KNOW: {
1450 if (test_at_bottom) {
1451 // Continue is the test at the bottom, no need to label the test
1452 // at the top. The body is a backward target.
1453 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1454 } else {
1455 // Label the test at the top as the continue target. The body
1456 // is a forward-only target.
1457 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1458 node->continue_target()->Bind();
1459 }
1460 // Compile the test with the body as the true target and preferred
1461 // fall-through and with the break target as the false target.
1462 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00001463 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001464
1465 if (dest.false_was_fall_through()) {
1466 // If we got the break target as fall-through, the test may have
1467 // been unconditionally false (if there are no jumps to the
1468 // body).
1469 if (!body.is_linked()) {
1470 DecrementLoopNesting();
1471 return;
1472 }
1473
1474 // Otherwise, jump around the body on the fall through and then
1475 // bind the body target.
1476 node->break_target()->Unuse();
1477 node->break_target()->Jump();
1478 body.Bind();
1479 }
1480 break;
1481 }
1482 case ALWAYS_FALSE:
1483 UNREACHABLE();
1484 break;
1485 }
1486
1487 CheckStack(); // TODO(1222600): ignore if body contains calls.
1488 Visit(node->body());
1489
1490 // Based on the condition analysis, compile the backward jump as
1491 // necessary.
1492 switch (info) {
1493 case ALWAYS_TRUE:
1494 // The loop body has been labeled with the continue target.
1495 if (has_valid_frame()) {
1496 node->continue_target()->Jump();
1497 }
1498 break;
1499 case DONT_KNOW:
1500 if (test_at_bottom) {
1501 // If we have chosen to recompile the test at the bottom,
1502 // then it is the continue target.
Steve Blocka7e24c12009-10-30 11:49:00 +00001503 if (node->continue_target()->is_linked()) {
1504 node->continue_target()->Bind();
1505 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001506 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00001507 // The break target is the fall-through (body is a backward
1508 // jump from here and thus an invalid fall-through).
1509 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00001510 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001511 }
1512 } else {
1513 // If we have chosen not to recompile the test at the
1514 // bottom, jump back to the one at the top.
1515 if (has_valid_frame()) {
1516 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00001517 }
1518 }
Steve Block3ce2e202009-11-05 08:53:23 +00001519 break;
1520 case ALWAYS_FALSE:
1521 UNREACHABLE();
1522 break;
1523 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001524
Steve Block3ce2e202009-11-05 08:53:23 +00001525 // The break target may be already bound (by the condition), or there
1526 // may not be a valid frame. Bind it only if needed.
1527 if (node->break_target()->is_linked()) {
1528 node->break_target()->Bind();
1529 }
1530 DecrementLoopNesting();
1531}
1532
1533
Steve Block6ded16b2010-05-10 14:33:55 +01001534void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
1535 ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
1536 if (slot->type() == Slot::LOCAL) {
1537 frame_->SetTypeForLocalAt(slot->index(), info);
1538 } else {
1539 frame_->SetTypeForParamAt(slot->index(), info);
1540 }
1541 if (FLAG_debug_code && info.IsSmi()) {
1542 if (slot->type() == Slot::LOCAL) {
1543 frame_->PushLocalAt(slot->index());
1544 } else {
1545 frame_->PushParameterAt(slot->index());
1546 }
1547 Result var = frame_->Pop();
1548 var.ToRegister();
1549 __ AbortIfNotSmi(var.reg(), "Non-smi value in smi-typed stack slot.");
1550 }
1551}
1552
1553
Steve Block3ce2e202009-11-05 08:53:23 +00001554void CodeGenerator::VisitForStatement(ForStatement* node) {
1555 ASSERT(!in_spilled_code());
1556 Comment cmnt(masm_, "[ ForStatement");
1557 CodeForStatementPosition(node);
1558
1559 // Compile the init expression if present.
1560 if (node->init() != NULL) {
1561 Visit(node->init());
1562 }
1563
1564 // If the condition is always false and has no side effects, we do not
1565 // need to compile anything else.
1566 ConditionAnalysis info = AnalyzeCondition(node->cond());
1567 if (info == ALWAYS_FALSE) return;
1568
1569 // Do not duplicate conditions that may have function literal
1570 // subexpressions. This can cause us to compile the function literal
1571 // twice.
1572 bool test_at_bottom = !node->may_have_function_literal();
1573 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1574 IncrementLoopNesting();
1575
1576 // Target for backward edge if no test at the bottom, otherwise
1577 // unused.
1578 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
1579
1580 // Target for backward edge if there is a test at the bottom,
1581 // otherwise used as target for test at the top.
1582 JumpTarget body;
1583 if (test_at_bottom) {
1584 body.set_direction(JumpTarget::BIDIRECTIONAL);
1585 }
1586
1587 // Based on the condition analysis, compile the test as necessary.
1588 switch (info) {
1589 case ALWAYS_TRUE:
1590 // We will not compile the test expression. Label the top of the
1591 // loop.
1592 if (node->next() == NULL) {
1593 // Use the continue target if there is no update expression.
1594 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1595 node->continue_target()->Bind();
1596 } else {
1597 // Otherwise use the backward loop target.
1598 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1599 loop.Bind();
1600 }
1601 break;
1602 case DONT_KNOW: {
1603 if (test_at_bottom) {
1604 // Continue is either the update expression or the test at the
1605 // bottom, no need to label the test at the top.
1606 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1607 } else if (node->next() == NULL) {
1608 // We are not recompiling the test at the bottom and there is no
1609 // update expression.
1610 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
1611 node->continue_target()->Bind();
1612 } else {
1613 // We are not recompiling the test at the bottom and there is an
1614 // update expression.
1615 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1616 loop.Bind();
1617 }
1618
1619 // Compile the test with the body as the true target and preferred
1620 // fall-through and with the break target as the false target.
1621 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00001622 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001623
1624 if (dest.false_was_fall_through()) {
1625 // If we got the break target as fall-through, the test may have
1626 // been unconditionally false (if there are no jumps to the
1627 // body).
1628 if (!body.is_linked()) {
1629 DecrementLoopNesting();
1630 return;
1631 }
1632
1633 // Otherwise, jump around the body on the fall through and then
1634 // bind the body target.
1635 node->break_target()->Unuse();
1636 node->break_target()->Jump();
1637 body.Bind();
1638 }
1639 break;
1640 }
1641 case ALWAYS_FALSE:
1642 UNREACHABLE();
1643 break;
1644 }
1645
1646 CheckStack(); // TODO(1222600): ignore if body contains calls.
Steve Block6ded16b2010-05-10 14:33:55 +01001647
1648 // We know that the loop index is a smi if it is not modified in the
1649 // loop body and it is checked against a constant limit in the loop
1650 // condition. In this case, we reset the static type information of the
1651 // loop index to smi before compiling the body, the update expression, and
1652 // the bottom check of the loop condition.
1653 if (node->is_fast_smi_loop()) {
1654 // Set number type of the loop variable to smi.
1655 SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
1656 }
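  // Illustrative example (added, not part of the original source): in a loop
  // such as
  //   for (var i = 0; i < 100; i++) { sum += a[i]; }
  // the index is not modified in the body and is checked against a constant
  // limit, so is_fast_smi_loop() holds and the loop variable can be typed as
  // a smi for the body, the update expression, and the bottom test.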
1657
Steve Block3ce2e202009-11-05 08:53:23 +00001658 Visit(node->body());
1659
1660 // If there is an update expression, compile it if necessary.
1661 if (node->next() != NULL) {
1662 if (node->continue_target()->is_linked()) {
1663 node->continue_target()->Bind();
1664 }
1665
1666 // Control can reach the update by falling out of the body or by a
1667 // continue.
1668 if (has_valid_frame()) {
1669 // Record the source position of the statement, because this code,
1670 // which comes after the code for the body, actually belongs to the
1671 // loop statement and not to the body.
1672 CodeForStatementPosition(node);
1673 Visit(node->next());
1674 }
1675 }
1676
Steve Block6ded16b2010-05-10 14:33:55 +01001677 // Set the type of the loop variable to smi before compiling the test
1678 // expression if we are in a fast smi loop condition.
1679 if (node->is_fast_smi_loop() && has_valid_frame()) {
1680 // Set number type of the loop variable to smi.
1681 SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
1682 }
1683
Steve Block3ce2e202009-11-05 08:53:23 +00001684 // Based on the condition analysis, compile the backward jump as
1685 // necessary.
1686 switch (info) {
1687 case ALWAYS_TRUE:
1688 if (has_valid_frame()) {
1689 if (node->next() == NULL) {
1690 node->continue_target()->Jump();
1691 } else {
1692 loop.Jump();
1693 }
1694 }
1695 break;
1696 case DONT_KNOW:
1697 if (test_at_bottom) {
1698 if (node->continue_target()->is_linked()) {
1699 // We can have dangling jumps to the continue target if there
1700 // was no update expression.
1701 node->continue_target()->Bind();
1702 }
1703 // Control can reach the test at the bottom by falling out of
1704 // the body, by a continue in the body, or from the update
1705 // expression.
1706 if (has_valid_frame()) {
1707 // The break target is the fall-through (body is a backward
1708 // jump from here).
1709 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00001710 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00001711 }
1712 } else {
1713 // Otherwise, jump back to the test at the top.
Steve Blocka7e24c12009-10-30 11:49:00 +00001714 if (has_valid_frame()) {
1715 if (node->next() == NULL) {
1716 node->continue_target()->Jump();
1717 } else {
1718 loop.Jump();
1719 }
1720 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001721 }
1722 break;
Steve Block3ce2e202009-11-05 08:53:23 +00001723 case ALWAYS_FALSE:
1724 UNREACHABLE();
1725 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00001726 }
1727
Steve Block3ce2e202009-11-05 08:53:23 +00001728 // The break target may be already bound (by the condition), or there
1729 // may not be a valid frame. Bind it only if needed.
1730 if (node->break_target()->is_linked()) {
1731 node->break_target()->Bind();
1732 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001733 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00001734}
1735
1736
1737void CodeGenerator::VisitForInStatement(ForInStatement* node) {
1738 ASSERT(!in_spilled_code());
1739 VirtualFrame::SpilledScope spilled_scope;
1740 Comment cmnt(masm_, "[ ForInStatement");
1741 CodeForStatementPosition(node);
1742
1743 JumpTarget primitive;
1744 JumpTarget jsobject;
1745 JumpTarget fixed_array;
1746 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
1747 JumpTarget end_del_check;
1748 JumpTarget exit;
1749
1750 // Get the object to enumerate over (converted to JSObject).
1751 LoadAndSpill(node->enumerable());
1752
1753 // Both SpiderMonkey and kjs ignore null and undefined in contrast
1754 // to the specification. 12.6.4 mandates a call to ToObject.
1755 frame_->EmitPop(rax);
1756
1757 // rax: value to be iterated over
1758 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1759 exit.Branch(equal);
1760 __ CompareRoot(rax, Heap::kNullValueRootIndex);
1761 exit.Branch(equal);
1762
1763 // Stack layout in body:
1764 // [iteration counter (smi)] <- slot 0
1765 // [length of array] <- slot 1
1766 // [FixedArray] <- slot 2
1767 // [Map or 0] <- slot 3
1768 // [Object] <- slot 4
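  // Illustrative example (added, not part of the original source):
  //   for (var key in obj) { use(key); }
  // iterates using the layout above: slot 0 counts upward, slot 1 holds the
  // number of names, slot 2 holds the names themselves, slot 3 holds the map
  // of obj when the enum cache can be used (or 0 in the slow case), and
  // slot 4 is obj itself, needed to filter out properties deleted during the
  // loop.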
1769
1770 // Check if enumerable is already a JSObject
1771 // rax: value to be iterated over
1772 Condition is_smi = masm_->CheckSmi(rax);
1773 primitive.Branch(is_smi);
1774 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
1775 jsobject.Branch(above_equal);
1776
1777 primitive.Bind();
1778 frame_->EmitPush(rax);
1779 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
1780 // The function call returns the value in rax, which is where we want it below.
1781
1782 jsobject.Bind();
1783 // Get the set of properties (as a FixedArray or Map).
1784 // rax: value to be iterated over
Steve Blockd0582a62009-12-15 09:54:21 +00001785 frame_->EmitPush(rax); // Push the object being iterated over.
Steve Blocka7e24c12009-10-30 11:49:00 +00001786
Steve Blockd0582a62009-12-15 09:54:21 +00001787
1788 // Check cache validity in generated code. This is a fast case for
1789 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1790 // guarantee cache validity, call the runtime system to check cache
1791 // validity or get the property names in a fixed array.
1792 JumpTarget call_runtime;
1793 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
1794 JumpTarget check_prototype;
1795 JumpTarget use_cache;
1796 __ movq(rcx, rax);
1797 loop.Bind();
1798 // Check that there are no elements.
1799 __ movq(rdx, FieldOperand(rcx, JSObject::kElementsOffset));
1800 __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
1801 call_runtime.Branch(not_equal);
1802 // Check that instance descriptors are not empty so that we can
1803 // check for an enum cache. Leave the map in rbx for the subsequent
1804 // prototype load.
1805 __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
1806 __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
1807 __ CompareRoot(rdx, Heap::kEmptyDescriptorArrayRootIndex);
1808 call_runtime.Branch(equal);
1809 // Check that there is an enum cache in the non-empty instance
1810 // descriptors. This is the case if the next enumeration index
1811 // field does not contain a smi.
1812 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
1813 is_smi = masm_->CheckSmi(rdx);
1814 call_runtime.Branch(is_smi);
1815 // For all objects but the receiver, check that the cache is empty.
1816 __ cmpq(rcx, rax);
1817 check_prototype.Branch(equal);
1818 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1819 __ CompareRoot(rdx, Heap::kEmptyFixedArrayRootIndex);
1820 call_runtime.Branch(not_equal);
1821 check_prototype.Bind();
1822 // Load the prototype from the map and loop if non-null.
1823 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
1824 __ CompareRoot(rcx, Heap::kNullValueRootIndex);
1825 loop.Branch(not_equal);
1826 // The enum cache is valid. Load the map of the object being
1827 // iterated over and use the cache for the iteration.
1828 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
1829 use_cache.Jump();
1830
1831 call_runtime.Bind();
1832 // Call the runtime to get the property names for the object.
Steve Blocka7e24c12009-10-30 11:49:00 +00001833 frame_->EmitPush(rax); // push the Object (slot 4) for the runtime call
1834 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1835
1836 // If we got a Map, we can do a fast modification check.
1837 // Otherwise, we got a FixedArray, and we have to do a slow check.
1838 // rax: map or fixed array (result from call to
1839 // Runtime::kGetPropertyNamesFast)
1840 __ movq(rdx, rax);
1841 __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
1842 __ CompareRoot(rcx, Heap::kMetaMapRootIndex);
1843 fixed_array.Branch(not_equal);
1844
Steve Blockd0582a62009-12-15 09:54:21 +00001845 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00001846 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00001847 // rax: map (either the result from a call to
1848 // Runtime::kGetPropertyNamesFast or fetched directly from
1849 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00001850 __ movq(rcx, rax);
1851 __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset));
1852 // Get the bridge array held in the enumeration index field.
1853 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
1854 // Get the cache from the bridge array.
1855 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
1856
1857 frame_->EmitPush(rax); // <- slot 3
1858 frame_->EmitPush(rdx); // <- slot 2
1859 __ movl(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
1860 __ Integer32ToSmi(rax, rax);
1861 frame_->EmitPush(rax); // <- slot 1
Steve Block3ce2e202009-11-05 08:53:23 +00001862 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
Steve Blocka7e24c12009-10-30 11:49:00 +00001863 entry.Jump();
1864
1865 fixed_array.Bind();
1866 // rax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
Steve Block3ce2e202009-11-05 08:53:23 +00001867 frame_->EmitPush(Smi::FromInt(0)); // <- slot 3
Steve Blocka7e24c12009-10-30 11:49:00 +00001868 frame_->EmitPush(rax); // <- slot 2
1869
1870 // Push the length of the array and the initial index onto the stack.
1871 __ movl(rax, FieldOperand(rax, FixedArray::kLengthOffset));
1872 __ Integer32ToSmi(rax, rax);
1873 frame_->EmitPush(rax); // <- slot 1
Steve Block3ce2e202009-11-05 08:53:23 +00001874 frame_->EmitPush(Smi::FromInt(0)); // <- slot 0
Steve Blocka7e24c12009-10-30 11:49:00 +00001875
1876 // Condition.
1877 entry.Bind();
1878 // Grab the current frame's height for the break and continue
1879 // targets only after all the state is pushed on the frame.
1880 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
1881 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
1882
1883 __ movq(rax, frame_->ElementAt(0)); // load the current count
Steve Block3ce2e202009-11-05 08:53:23 +00001884 __ SmiCompare(frame_->ElementAt(1), rax); // compare to the array length
1885 node->break_target()->Branch(below_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00001886
1887 // Get the i'th entry of the array.
1888 __ movq(rdx, frame_->ElementAt(2));
1889 SmiIndex index = masm_->SmiToIndex(rbx, rax, kPointerSizeLog2);
1890 __ movq(rbx,
1891 FieldOperand(rdx, index.reg, index.scale, FixedArray::kHeaderSize));
1892
1893 // Get the expected map from the stack or a zero map in the
1894 // permanent slow case rax: current iteration count rbx: i'th entry
1895 // of the enum cache
1896 __ movq(rdx, frame_->ElementAt(3));
1897 // Check if the expected map still matches that of the enumerable.
1898 // If not, we have to filter the key.
1899 // rax: current iteration count
1900 // rbx: i'th entry of the enum cache
1901 // rdx: expected map value
1902 __ movq(rcx, frame_->ElementAt(4));
1903 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
1904 __ cmpq(rcx, rdx);
1905 end_del_check.Branch(equal);
1906
1907 // Convert the entry to a string (or null if it isn't a property anymore).
1908 frame_->EmitPush(frame_->ElementAt(4)); // push enumerable
1909 frame_->EmitPush(rbx); // push entry
1910 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
1911 __ movq(rbx, rax);
1912
1913 // If the property has been removed while iterating, we just skip it.
1914 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1915 node->continue_target()->Branch(equal);
1916
1917 end_del_check.Bind();
1918 // Store the entry in the 'each' expression and take another spin in the
1919 // loop. rbx: i'th entry of the enum cache (or string thereof)
1920 frame_->EmitPush(rbx);
1921 { Reference each(this, node->each());
1922 // Loading a reference may leave the frame in an unspilled state.
1923 frame_->SpillAll();
1924 if (!each.is_illegal()) {
1925 if (each.size() > 0) {
1926 frame_->EmitPush(frame_->ElementAt(each.size()));
Leon Clarked91b9f72010-01-27 17:25:45 +00001927 each.SetValue(NOT_CONST_INIT);
1928 frame_->Drop(2); // Drop the original and the copy of the element.
1929 } else {
1930 // If the reference has size zero then we can use the value below
1931 // the reference as if it were above the reference, instead of pushing
1932 // a new copy of it above the reference.
1933 each.SetValue(NOT_CONST_INIT);
1934 frame_->Drop(); // Drop the original of the element.
Steve Blocka7e24c12009-10-30 11:49:00 +00001935 }
1936 }
1937 }
1938 // Unloading a reference may leave the frame in an unspilled state.
1939 frame_->SpillAll();
1940
Steve Blocka7e24c12009-10-30 11:49:00 +00001941 // Body.
1942 CheckStack(); // TODO(1222600): ignore if body contains calls.
1943 VisitAndSpill(node->body());
1944
1945 // Next. Reestablish a spilled frame in case we are coming here via
1946 // a continue in the body.
1947 node->continue_target()->Bind();
1948 frame_->SpillAll();
1949 frame_->EmitPop(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00001950 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
Steve Blocka7e24c12009-10-30 11:49:00 +00001951 frame_->EmitPush(rax);
1952 entry.Jump();
1953
1954 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
1955 // any frame.
1956 node->break_target()->Bind();
1957 frame_->Drop(5);
1958
1959 // Exit.
1960 exit.Bind();
1961
1962 node->continue_target()->Unuse();
1963 node->break_target()->Unuse();
1964}
1965
Steve Block3ce2e202009-11-05 08:53:23 +00001966void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001967 ASSERT(!in_spilled_code());
1968 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00001969 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00001970 CodeForStatementPosition(node);
1971
1972 JumpTarget try_block;
1973 JumpTarget exit;
1974
1975 try_block.Call();
1976 // --- Catch block ---
1977 frame_->EmitPush(rax);
1978
1979 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00001980 Variable* catch_var = node->catch_var()->var();
1981 ASSERT(catch_var != NULL && catch_var->slot() != NULL);
1982 StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00001983
1984 // Remove the exception from the stack.
1985 frame_->Drop();
1986
1987 VisitStatementsAndSpill(node->catch_block()->statements());
1988 if (has_valid_frame()) {
1989 exit.Jump();
1990 }
1991
1992
1993 // --- Try block ---
1994 try_block.Bind();
1995
1996 frame_->PushTryHandler(TRY_CATCH_HANDLER);
1997 int handler_height = frame_->height();
1998
1999 // Shadow the jump targets for all escapes from the try block, including
2000 // returns. During shadowing, the original target is hidden as the
2001 // ShadowTarget and operations on the original actually affect the
2002 // shadowing target.
2003 //
2004 // We should probably try to unify the escaping targets and the return
2005 // target.
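  // Illustrative example (added, not part of the original source): in
  //   try { if (done) return result; work(); } catch (e) { handle(e); }
  // the `return` escapes the try block, so the function return target is
  // shadowed while the try body is compiled; the unlink code further down
  // removes the try handler before the jump is finally taken.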
2006 int nof_escapes = node->escaping_targets()->length();
2007 List<ShadowTarget*> shadows(1 + nof_escapes);
2008
2009 // Add the shadow target for the function return.
2010 static const int kReturnShadowIndex = 0;
2011 shadows.Add(new ShadowTarget(&function_return_));
2012 bool function_return_was_shadowed = function_return_is_shadowed_;
2013 function_return_is_shadowed_ = true;
2014 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2015
2016 // Add the remaining shadow targets.
2017 for (int i = 0; i < nof_escapes; i++) {
2018 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2019 }
2020
2021 // Generate code for the statements in the try block.
2022 VisitStatementsAndSpill(node->try_block()->statements());
2023
2024 // Stop the introduced shadowing and count the number of required unlinks.
2025 // After shadowing stops, the original targets are unshadowed and the
2026 // ShadowTargets represent the formerly shadowing targets.
2027 bool has_unlinks = false;
2028 for (int i = 0; i < shadows.length(); i++) {
2029 shadows[i]->StopShadowing();
2030 has_unlinks = has_unlinks || shadows[i]->is_linked();
2031 }
2032 function_return_is_shadowed_ = function_return_was_shadowed;
2033
2034 // Get an external reference to the handler address.
2035 ExternalReference handler_address(Top::k_handler_address);
2036
2037 // Make sure that there's nothing left on the stack above the
2038 // handler structure.
2039 if (FLAG_debug_code) {
2040 __ movq(kScratchRegister, handler_address);
2041 __ cmpq(rsp, Operand(kScratchRegister, 0));
2042 __ Assert(equal, "stack pointer should point to top handler");
2043 }
2044
2045 // If we can fall off the end of the try block, unlink from try chain.
2046 if (has_valid_frame()) {
2047 // The next handler address is on top of the frame. Unlink from
2048 // the handler list and drop the rest of this handler from the
2049 // frame.
2050 ASSERT(StackHandlerConstants::kNextOffset == 0);
2051 __ movq(kScratchRegister, handler_address);
2052 frame_->EmitPop(Operand(kScratchRegister, 0));
2053 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2054 if (has_unlinks) {
2055 exit.Jump();
2056 }
2057 }
2058
2059 // Generate unlink code for the (formerly) shadowing targets that
2060 // have been jumped to. Deallocate each shadow target.
2061 Result return_value;
2062 for (int i = 0; i < shadows.length(); i++) {
2063 if (shadows[i]->is_linked()) {
2064 // Unlink from try chain; be careful not to destroy the TOS if
2065 // there is one.
2066 if (i == kReturnShadowIndex) {
2067 shadows[i]->Bind(&return_value);
2068 return_value.ToRegister(rax);
2069 } else {
2070 shadows[i]->Bind();
2071 }
2072 // Because we can be jumping here (to spilled code) from
2073 // unspilled code, we need to reestablish a spilled frame at
2074 // this block.
2075 frame_->SpillAll();
2076
2077 // Reload sp from the top handler, because some statements that we
2078 // break from (eg, for...in) may have left stuff on the stack.
2079 __ movq(kScratchRegister, handler_address);
2080 __ movq(rsp, Operand(kScratchRegister, 0));
2081 frame_->Forget(frame_->height() - handler_height);
2082
2083 ASSERT(StackHandlerConstants::kNextOffset == 0);
2084 __ movq(kScratchRegister, handler_address);
2085 frame_->EmitPop(Operand(kScratchRegister, 0));
2086 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2087
2088 if (i == kReturnShadowIndex) {
2089 if (!function_return_is_shadowed_) frame_->PrepareForReturn();
2090 shadows[i]->other_target()->Jump(&return_value);
2091 } else {
2092 shadows[i]->other_target()->Jump();
2093 }
2094 }
2095 }
2096
2097 exit.Bind();
2098}
2099
2100
Steve Block3ce2e202009-11-05 08:53:23 +00002101void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002102 ASSERT(!in_spilled_code());
2103 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00002104 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002105 CodeForStatementPosition(node);
2106
2107 // State: Used to keep track of reason for entering the finally
2108 // block. Should probably be extended to hold information for
2109 // break/continue from within the try block.
2110 enum { FALLING, THROWING, JUMPING };
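  // Illustrative example (added, not part of the original source): in
  //   try { return f(); } finally { cleanup(); }
  // falling off the end of the try block enters the finally block in state
  // FALLING, an exception thrown by f() enters it in state THROWING, and the
  // shadowed `return` enters it in one of the JUMPING states, which the code
  // after the finally block uses to dispatch to the right destination.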
2111
2112 JumpTarget try_block;
2113 JumpTarget finally_block;
2114
2115 try_block.Call();
2116
2117 frame_->EmitPush(rax);
2118 // In case of thrown exceptions, this is where we continue.
Steve Block3ce2e202009-11-05 08:53:23 +00002119 __ Move(rcx, Smi::FromInt(THROWING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002120 finally_block.Jump();
2121
2122 // --- Try block ---
2123 try_block.Bind();
2124
2125 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
2126 int handler_height = frame_->height();
2127
2128 // Shadow the jump targets for all escapes from the try block, including
2129 // returns. During shadowing, the original target is hidden as the
2130 // ShadowTarget and operations on the original actually affect the
2131 // shadowing target.
2132 //
2133 // We should probably try to unify the escaping targets and the return
2134 // target.
2135 int nof_escapes = node->escaping_targets()->length();
2136 List<ShadowTarget*> shadows(1 + nof_escapes);
2137
2138 // Add the shadow target for the function return.
2139 static const int kReturnShadowIndex = 0;
2140 shadows.Add(new ShadowTarget(&function_return_));
2141 bool function_return_was_shadowed = function_return_is_shadowed_;
2142 function_return_is_shadowed_ = true;
2143 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2144
2145 // Add the remaining shadow targets.
2146 for (int i = 0; i < nof_escapes; i++) {
2147 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2148 }
2149
2150 // Generate code for the statements in the try block.
2151 VisitStatementsAndSpill(node->try_block()->statements());
2152
2153 // Stop the introduced shadowing and count the number of required unlinks.
2154 // After shadowing stops, the original targets are unshadowed and the
2155 // ShadowTargets represent the formerly shadowing targets.
2156 int nof_unlinks = 0;
2157 for (int i = 0; i < shadows.length(); i++) {
2158 shadows[i]->StopShadowing();
2159 if (shadows[i]->is_linked()) nof_unlinks++;
2160 }
2161 function_return_is_shadowed_ = function_return_was_shadowed;
2162
2163 // Get an external reference to the handler address.
2164 ExternalReference handler_address(Top::k_handler_address);
2165
2166 // If we can fall off the end of the try block, unlink from the try
2167 // chain and set the state on the frame to FALLING.
2168 if (has_valid_frame()) {
2169 // The next handler address is on top of the frame.
2170 ASSERT(StackHandlerConstants::kNextOffset == 0);
2171 __ movq(kScratchRegister, handler_address);
2172 frame_->EmitPop(Operand(kScratchRegister, 0));
2173 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2174
2175 // Fake a top of stack value (unneeded when FALLING) and set the
2176 // state in ecx, then jump around the unlink blocks if any.
2177 frame_->EmitPush(Heap::kUndefinedValueRootIndex);
Steve Block3ce2e202009-11-05 08:53:23 +00002178 __ Move(rcx, Smi::FromInt(FALLING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002179 if (nof_unlinks > 0) {
2180 finally_block.Jump();
2181 }
2182 }
2183
2184 // Generate code to unlink and set the state for the (formerly)
2185 // shadowing targets that have been jumped to.
2186 for (int i = 0; i < shadows.length(); i++) {
2187 if (shadows[i]->is_linked()) {
2188 // If we have come from the shadowed return, the return value is
2189 // on the virtual frame. We must preserve it until it is
2190 // pushed.
2191 if (i == kReturnShadowIndex) {
2192 Result return_value;
2193 shadows[i]->Bind(&return_value);
2194 return_value.ToRegister(rax);
2195 } else {
2196 shadows[i]->Bind();
2197 }
2198 // Because we can be jumping here (to spilled code) from
2199 // unspilled code, we need to reestablish a spilled frame at
2200 // this block.
2201 frame_->SpillAll();
2202
2203 // Reload sp from the top handler, because some statements that
2204 // we break from (e.g., for...in) may have left stuff on the
2205 // stack.
2206 __ movq(kScratchRegister, handler_address);
2207 __ movq(rsp, Operand(kScratchRegister, 0));
2208 frame_->Forget(frame_->height() - handler_height);
2209
2210 // Unlink this handler and drop it from the frame.
2211 ASSERT(StackHandlerConstants::kNextOffset == 0);
2212 __ movq(kScratchRegister, handler_address);
2213 frame_->EmitPop(Operand(kScratchRegister, 0));
2214 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2215
2216 if (i == kReturnShadowIndex) {
2217 // If this target shadowed the function return, materialize
2218 // the return value on the stack.
2219 frame_->EmitPush(rax);
2220 } else {
2221 // Fake TOS for targets that shadowed breaks and continues.
2222 frame_->EmitPush(Heap::kUndefinedValueRootIndex);
2223 }
Steve Block3ce2e202009-11-05 08:53:23 +00002224 __ Move(rcx, Smi::FromInt(JUMPING + i));
Steve Blocka7e24c12009-10-30 11:49:00 +00002225 if (--nof_unlinks > 0) {
2226 // If this is not the last unlink block, jump around the next.
2227 finally_block.Jump();
2228 }
2229 }
2230 }
2231
2232 // --- Finally block ---
2233 finally_block.Bind();
2234
2235 // Push the state on the stack.
2236 frame_->EmitPush(rcx);
2237
2238 // We keep two elements on the stack - the (possibly faked) result
2239 // and the state - while evaluating the finally block.
2240 //
2241 // Generate code for the statements in the finally block.
2242 VisitStatementsAndSpill(node->finally_block()->statements());
2243
2244 if (has_valid_frame()) {
2245 // Restore state and return value or faked TOS.
2246 frame_->EmitPop(rcx);
2247 frame_->EmitPop(rax);
2248 }
2249
2250 // Generate code to jump to the right destination for all used
2251 // formerly shadowing targets. Deallocate each shadow target.
2252 for (int i = 0; i < shadows.length(); i++) {
2253 if (has_valid_frame() && shadows[i]->is_bound()) {
2254 BreakTarget* original = shadows[i]->other_target();
Steve Block3ce2e202009-11-05 08:53:23 +00002255 __ SmiCompare(rcx, Smi::FromInt(JUMPING + i));
Steve Blocka7e24c12009-10-30 11:49:00 +00002256 if (i == kReturnShadowIndex) {
2257 // The return value is (already) in rax.
2258 Result return_value = allocator_->Allocate(rax);
2259 ASSERT(return_value.is_valid());
2260 if (function_return_is_shadowed_) {
2261 original->Branch(equal, &return_value);
2262 } else {
2263 // Branch around the preparation for return which may emit
2264 // code.
2265 JumpTarget skip;
2266 skip.Branch(not_equal);
2267 frame_->PrepareForReturn();
2268 original->Jump(&return_value);
2269 skip.Bind();
2270 }
2271 } else {
2272 original->Branch(equal);
2273 }
2274 }
2275 }
2276
2277 if (has_valid_frame()) {
2278 // Check if we need to rethrow the exception.
2279 JumpTarget exit;
Steve Block3ce2e202009-11-05 08:53:23 +00002280 __ SmiCompare(rcx, Smi::FromInt(THROWING));
Steve Blocka7e24c12009-10-30 11:49:00 +00002281 exit.Branch(not_equal);
2282
2283 // Rethrow exception.
2284 frame_->EmitPush(rax); // undo pop from above
2285 frame_->CallRuntime(Runtime::kReThrow, 1);
2286
2287 // Done.
2288 exit.Bind();
2289 }
2290}
2291
2292
2293void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
2294 ASSERT(!in_spilled_code());
2295 Comment cmnt(masm_, "[ DebuggerStatement");
2296 CodeForStatementPosition(node);
2297#ifdef ENABLE_DEBUGGER_SUPPORT
2298 // Spill everything, even constants, to the frame.
2299 frame_->SpillAll();
Leon Clarke4515c472010-02-03 11:58:03 +00002300
Andrei Popescu402d9372010-02-26 13:31:12 +00002301 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00002302 // Ignore the return value.
2303#endif
2304}
2305
2306
Steve Block6ded16b2010-05-10 14:33:55 +01002307void CodeGenerator::InstantiateFunction(
2308 Handle<SharedFunctionInfo> function_info) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002309 // The inevitable call will sync frame elements to memory anyway, so
2310 // we do it eagerly to allow us to push the arguments directly into
2311 // place.
Steve Blocka7e24c12009-10-30 11:49:00 +00002312 frame_->SyncRange(0, frame_->element_count() - 1);
2313
Leon Clarkee46be812010-01-19 14:06:41 +00002314 // Use the fast case closure allocation code that allocates in new
2315 // space for nested functions that don't need literals cloning.
Steve Block6ded16b2010-05-10 14:33:55 +01002316 if (scope()->is_function_scope() && function_info->num_literals() == 0) {
Leon Clarkee46be812010-01-19 14:06:41 +00002317 FastNewClosureStub stub;
Steve Block6ded16b2010-05-10 14:33:55 +01002318 frame_->Push(function_info);
Leon Clarkee46be812010-01-19 14:06:41 +00002319 Result answer = frame_->CallStub(&stub, 1);
2320 frame_->Push(&answer);
2321 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002322 // Call the runtime to instantiate the function based on the
2323 // shared function info.
Leon Clarkee46be812010-01-19 14:06:41 +00002324 frame_->EmitPush(rsi);
Steve Block6ded16b2010-05-10 14:33:55 +01002325 frame_->EmitPush(function_info);
Leon Clarkee46be812010-01-19 14:06:41 +00002326 Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
2327 frame_->Push(&result);
2328 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002329}
2330
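// Illustrative example (added, not part of the original source): a nested
// function with no literals, e.g.
//   function outer() { return function(x) { return x + 1; }; }
// takes the FastNewClosureStub path above, while a nested function that
// contains literals (such as a regexp or array literal) is instantiated via
// Runtime::kNewClosure.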
2331
2332void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
2333 Comment cmnt(masm_, "[ FunctionLiteral");
2334
Steve Block6ded16b2010-05-10 14:33:55 +01002335 // Build the function info and instantiate it.
2336 Handle<SharedFunctionInfo> function_info =
2337 Compiler::BuildFunctionInfo(node, script(), this);
Steve Blocka7e24c12009-10-30 11:49:00 +00002338 // Check for stack-overflow exception.
2339 if (HasStackOverflow()) return;
Steve Block6ded16b2010-05-10 14:33:55 +01002340 InstantiateFunction(function_info);
Steve Blocka7e24c12009-10-30 11:49:00 +00002341}
2342
2343
Steve Block6ded16b2010-05-10 14:33:55 +01002344void CodeGenerator::VisitSharedFunctionInfoLiteral(
2345 SharedFunctionInfoLiteral* node) {
2346 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
2347 InstantiateFunction(node->shared_function_info());
Steve Blocka7e24c12009-10-30 11:49:00 +00002348}
2349
2350
2351void CodeGenerator::VisitConditional(Conditional* node) {
2352 Comment cmnt(masm_, "[ Conditional");
2353 JumpTarget then;
2354 JumpTarget else_;
2355 JumpTarget exit;
2356 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00002357 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002358
2359 if (dest.false_was_fall_through()) {
2360 // The else target was bound, so we compile the else part first.
Steve Blockd0582a62009-12-15 09:54:21 +00002361 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002362
2363 if (then.is_linked()) {
2364 exit.Jump();
2365 then.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00002366 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002367 }
2368 } else {
2369 // The then target was bound, so we compile the then part first.
Steve Blockd0582a62009-12-15 09:54:21 +00002370 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002371
2372 if (else_.is_linked()) {
2373 exit.Jump();
2374 else_.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00002375 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002376 }
2377 }
2378
2379 exit.Bind();
2380}
2381
2382
2383void CodeGenerator::VisitSlot(Slot* node) {
2384 Comment cmnt(masm_, "[ Slot");
Steve Blockd0582a62009-12-15 09:54:21 +00002385 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00002386}
2387
2388
2389void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
2390 Comment cmnt(masm_, "[ VariableProxy");
2391 Variable* var = node->var();
2392 Expression* expr = var->rewrite();
2393 if (expr != NULL) {
2394 Visit(expr);
2395 } else {
2396 ASSERT(var->is_global());
2397 Reference ref(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00002398 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002399 }
2400}
2401
2402
2403void CodeGenerator::VisitLiteral(Literal* node) {
2404 Comment cmnt(masm_, "[ Literal");
2405 frame_->Push(node->handle());
2406}
2407
2408
2409// Materialize the regexp literal 'node' in the literals array
2410// 'literals' of the function. Leave the regexp boilerplate in
2411// 'boilerplate'.
2412class DeferredRegExpLiteral: public DeferredCode {
2413 public:
2414 DeferredRegExpLiteral(Register boilerplate,
2415 Register literals,
2416 RegExpLiteral* node)
2417 : boilerplate_(boilerplate), literals_(literals), node_(node) {
2418 set_comment("[ DeferredRegExpLiteral");
2419 }
2420
2421 void Generate();
2422
2423 private:
2424 Register boilerplate_;
2425 Register literals_;
2426 RegExpLiteral* node_;
2427};
2428
2429
2430void DeferredRegExpLiteral::Generate() {
2431 // Since the entry is undefined we call the runtime system to
2432 // compute the literal.
2433 // Literal array (0).
2434 __ push(literals_);
2435 // Literal index (1).
Steve Block3ce2e202009-11-05 08:53:23 +00002436 __ Push(Smi::FromInt(node_->literal_index()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002437 // RegExp pattern (2).
2438 __ Push(node_->pattern());
2439 // RegExp flags (3).
2440 __ Push(node_->flags());
2441 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
2442 if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
2443}
2444
2445
2446void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
2447 Comment cmnt(masm_, "[ RegExp Literal");
2448
2449 // Retrieve the literals array and check the allocated entry. Begin
2450 // with a writable copy of the function of this activation in a
2451 // register.
2452 frame_->PushFunction();
2453 Result literals = frame_->Pop();
2454 literals.ToRegister();
2455 frame_->Spill(literals.reg());
2456
2457 // Load the literals array of the function.
2458 __ movq(literals.reg(),
2459 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
2460
2461 // Load the literal at the index saved in the AST node.
2462 Result boilerplate = allocator_->Allocate();
2463 ASSERT(boilerplate.is_valid());
2464 int literal_offset =
2465 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
2466 __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
2467
2468 // Check whether we need to materialize the RegExp object. If so,
2469 // jump to the deferred code passing the literals array.
2470 DeferredRegExpLiteral* deferred =
2471 new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
2472 __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
2473 deferred->Branch(equal);
2474 deferred->BindExit();
2475 literals.Unuse();
2476
2477 // Push the boilerplate object.
2478 frame_->Push(&boilerplate);
2479}
2480
2481
Steve Blocka7e24c12009-10-30 11:49:00 +00002482void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
2483 Comment cmnt(masm_, "[ ObjectLiteral");
2484
Leon Clarkee46be812010-01-19 14:06:41 +00002485 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00002486 // register.
2487 frame_->PushFunction();
2488 Result literals = frame_->Pop();
2489 literals.ToRegister();
2490 frame_->Spill(literals.reg());
2491
2492 // Load the literals array of the function.
2493 __ movq(literals.reg(),
2494 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00002495 // Literal array.
2496 frame_->Push(&literals);
2497 // Literal index.
2498 frame_->Push(Smi::FromInt(node->literal_index()));
2499 // Constant properties.
2500 frame_->Push(node->constant_properties());
Steve Block6ded16b2010-05-10 14:33:55 +01002501 // Should the object literal have fast elements?
2502 frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
Leon Clarkee46be812010-01-19 14:06:41 +00002503 Result clone;
2504 if (node->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01002505 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00002506 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002507 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00002508 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002509 frame_->Push(&clone);
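  // Illustrative example (added, not part of the original source): in
  //   var o = { a: 1, b: g(), c: { d: 2 } };
  // the constant property `a` comes from the boilerplate, `b` must be stored
  // by the generated code below, and the nested literal `c` makes the node's
  // depth greater than 1, selecting the non-shallow runtime clone above.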
2510
2511 for (int i = 0; i < node->properties()->length(); i++) {
2512 ObjectLiteral::Property* property = node->properties()->at(i);
2513 switch (property->kind()) {
2514 case ObjectLiteral::Property::CONSTANT:
2515 break;
2516 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2517 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
2518 // else fall through.
2519 case ObjectLiteral::Property::COMPUTED: {
2520 Handle<Object> key(property->key()->handle());
2521 if (key->IsSymbol()) {
2522 // Duplicate the object as the IC receiver.
2523 frame_->Dup();
2524 Load(property->value());
2525 frame_->Push(key);
2526 Result ignored = frame_->CallStoreIC();
Steve Blocka7e24c12009-10-30 11:49:00 +00002527 break;
2528 }
2529 // Fall through
2530 }
2531 case ObjectLiteral::Property::PROTOTYPE: {
2532 // Duplicate the object as an argument to the runtime call.
2533 frame_->Dup();
2534 Load(property->key());
2535 Load(property->value());
2536 Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
2537 // Ignore the result.
2538 break;
2539 }
2540 case ObjectLiteral::Property::SETTER: {
2541 // Duplicate the object as an argument to the runtime call.
2542 frame_->Dup();
2543 Load(property->key());
2544 frame_->Push(Smi::FromInt(1));
2545 Load(property->value());
2546 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
2547 // Ignore the result.
2548 break;
2549 }
2550 case ObjectLiteral::Property::GETTER: {
2551 // Duplicate the object as an argument to the runtime call.
2552 frame_->Dup();
2553 Load(property->key());
2554 frame_->Push(Smi::FromInt(0));
2555 Load(property->value());
2556 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
2557 // Ignore the result.
2558 break;
2559 }
2560 default: UNREACHABLE();
2561 }
2562 }
2563}
2564
2565
Steve Blocka7e24c12009-10-30 11:49:00 +00002566void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
2567 Comment cmnt(masm_, "[ ArrayLiteral");
2568
Leon Clarkee46be812010-01-19 14:06:41 +00002569 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00002570 // register.
2571 frame_->PushFunction();
2572 Result literals = frame_->Pop();
2573 literals.ToRegister();
2574 frame_->Spill(literals.reg());
2575
2576 // Load the literals array of the function.
2577 __ movq(literals.reg(),
2578 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Andrei Popescu402d9372010-02-26 13:31:12 +00002579
Leon Clarkee46be812010-01-19 14:06:41 +00002580 frame_->Push(&literals);
Leon Clarkee46be812010-01-19 14:06:41 +00002581 frame_->Push(Smi::FromInt(node->literal_index()));
Leon Clarkee46be812010-01-19 14:06:41 +00002582 frame_->Push(node->constant_elements());
Andrei Popescu402d9372010-02-26 13:31:12 +00002583 int length = node->values()->length();
Leon Clarkee46be812010-01-19 14:06:41 +00002584 Result clone;
2585 if (node->depth() > 1) {
2586 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00002587 } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00002588 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00002589 } else {
2590 FastCloneShallowArrayStub stub(length);
2591 clone = frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00002592 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002593 frame_->Push(&clone);
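  // Illustrative example (added, not part of the original source): for
  //   var a = [1, 2, g()];
  // the literal is shallow and short, so FastCloneShallowArrayStub copies the
  // boilerplate; the value of g() is then written into the elements array by
  // the loop below, followed by the write barrier.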
2594
2595 // Generate code to set the elements in the array that are not
2596 // literals.
2597 for (int i = 0; i < node->values()->length(); i++) {
2598 Expression* value = node->values()->at(i);
2599
2600 // If value is a literal the property value is already set in the
2601 // boilerplate object.
2602 if (value->AsLiteral() != NULL) continue;
2603 // If value is a materialized literal the property value is already set
2604 // in the boilerplate object if it is simple.
2605 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
2606
2607 // The property must be set by generated code.
2608 Load(value);
2609
2610 // Get the property value off the stack.
2611 Result prop_value = frame_->Pop();
2612 prop_value.ToRegister();
2613
2614 // Fetch the array literal while leaving a copy on the stack and
2615 // use it to get the elements array.
2616 frame_->Dup();
2617 Result elements = frame_->Pop();
2618 elements.ToRegister();
2619 frame_->Spill(elements.reg());
2620 // Get the elements FixedArray.
2621 __ movq(elements.reg(),
2622 FieldOperand(elements.reg(), JSObject::kElementsOffset));
2623
2624 // Write to the indexed properties array.
2625 int offset = i * kPointerSize + FixedArray::kHeaderSize;
2626 __ movq(FieldOperand(elements.reg(), offset), prop_value.reg());
2627
2628 // Update the write barrier for the array address.
2629 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier.
2630 Result scratch = allocator_->Allocate();
2631 ASSERT(scratch.is_valid());
2632 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
2633 }
2634}
2635
2636
2637void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
2638 ASSERT(!in_spilled_code());
2639 // Call runtime routine to allocate the catch extension object and
2640 // assign the exception value to the catch variable.
2641 Comment cmnt(masm_, "[ CatchExtensionObject");
2642 Load(node->key());
2643 Load(node->value());
2644 Result result =
2645 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
2646 frame_->Push(&result);
2647}
2648
2649
2650void CodeGenerator::VisitAssignment(Assignment* node) {
2651 Comment cmnt(masm_, "[ Assignment");
2652
Leon Clarked91b9f72010-01-27 17:25:45 +00002653 { Reference target(this, node->target(), node->is_compound());
Steve Blocka7e24c12009-10-30 11:49:00 +00002654 if (target.is_illegal()) {
2655 // Fool the virtual frame into thinking that we left the assignment's
2656 // value on the frame.
2657 frame_->Push(Smi::FromInt(0));
2658 return;
2659 }
2660 Variable* var = node->target()->AsVariableProxy()->AsVariable();
2661
2662 if (node->starts_initialization_block()) {
2663 ASSERT(target.type() == Reference::NAMED ||
2664 target.type() == Reference::KEYED);
2665 // Change to slow case in the beginning of an initialization
2666 // block to avoid the quadratic behavior of repeatedly adding
2667 // fast properties.
2668
2669 // The receiver is the argument to the runtime call. It is the
2670 // first value pushed when the reference was loaded to the
2671 // frame.
2672 frame_->PushElementAt(target.size() - 1);
2673 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
2674 }
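    // Illustrative example (added, not part of the original source): a run of
    // assignments such as
    //   this.a = 1; this.b = 2; this.c = 3;   // in a constructor body
    // forms an initialization block: the first store switches the receiver to
    // slow properties (above) and the last one switches it back to fast
    // properties (below), avoiding repeated fast-property map transitions.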
Leon Clarked91b9f72010-01-27 17:25:45 +00002675 if (node->ends_initialization_block()) {
2676 // Add an extra copy of the receiver to the frame, so that it can be
2677 // converted back to fast case after the assignment.
2678 ASSERT(target.type() == Reference::NAMED ||
2679 target.type() == Reference::KEYED);
2680 if (target.type() == Reference::NAMED) {
2681 frame_->Dup();
2682 // Dup target receiver on stack.
2683 } else {
2684 ASSERT(target.type() == Reference::KEYED);
2685 Result temp = frame_->Pop();
2686 frame_->Dup();
2687 frame_->Push(&temp);
2688 }
2689 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002690 if (node->op() == Token::ASSIGN ||
2691 node->op() == Token::INIT_VAR ||
2692 node->op() == Token::INIT_CONST) {
2693 Load(node->value());
2694
Leon Clarked91b9f72010-01-27 17:25:45 +00002695 } else { // Assignment is a compound assignment.
Steve Blocka7e24c12009-10-30 11:49:00 +00002696 Literal* literal = node->value()->AsLiteral();
2697 bool overwrite_value =
2698 (node->value()->AsBinaryOperation() != NULL &&
2699 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
2700 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
2701 // There are two cases where the target is not read in the right hand
2702 // side that are easy to test for: the right hand side is a literal,
2703 // or the right hand side is a different variable. TakeValue invalidates
2704 // the target, with an implicit promise that it will be written to again
2705 // before it is read.
2706 if (literal != NULL || (right_var != NULL && right_var != var)) {
Steve Blockd0582a62009-12-15 09:54:21 +00002707 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002708 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00002709 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002710 }
2711 Load(node->value());
Steve Block6ded16b2010-05-10 14:33:55 +01002712 BinaryOperation expr(node, node->binary_op(), node->target(),
2713 node->value());
2714 GenericBinaryOperation(&expr,
Steve Blocka7e24c12009-10-30 11:49:00 +00002715 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
2716 }
2717
2718 if (var != NULL &&
2719 var->mode() == Variable::CONST &&
2720 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
2721 // Assignment ignored - leave the value on the stack.
Leon Clarked91b9f72010-01-27 17:25:45 +00002722 UnloadReference(&target);
Steve Blocka7e24c12009-10-30 11:49:00 +00002723 } else {
2724 CodeForSourcePosition(node->position());
2725 if (node->op() == Token::INIT_CONST) {
2726 // Dynamic constant initializations must use the function context
2727 // and initialize the actual constant declared. Dynamic variable
2728 // initializations are simply assignments and use SetValue.
2729 target.SetValue(CONST_INIT);
2730 } else {
2731 target.SetValue(NOT_CONST_INIT);
2732 }
2733 if (node->ends_initialization_block()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00002734 ASSERT(target.type() == Reference::UNLOADED);
Steve Blocka7e24c12009-10-30 11:49:00 +00002735 // End of initialization block. Revert to fast case. The
Leon Clarked91b9f72010-01-27 17:25:45 +00002736 // argument to the runtime call is the extra copy of the receiver,
2737 // which is below the value of the assignment.
2738 // Swap the receiver and the value of the assignment expression.
2739 Result lhs = frame_->Pop();
2740 Result receiver = frame_->Pop();
2741 frame_->Push(&lhs);
2742 frame_->Push(&receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00002743 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
2744 }
2745 }
2746 }
2747}
2748
2749
2750void CodeGenerator::VisitThrow(Throw* node) {
2751 Comment cmnt(masm_, "[ Throw");
2752 Load(node->exception());
2753 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
2754 frame_->Push(&result);
2755}
2756
2757
2758void CodeGenerator::VisitProperty(Property* node) {
2759 Comment cmnt(masm_, "[ Property");
2760 Reference property(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00002761 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002762}
2763
2764
2765void CodeGenerator::VisitCall(Call* node) {
2766 Comment cmnt(masm_, "[ Call");
2767
2768 ZoneList<Expression*>* args = node->arguments();
2769
2770 // Check if the function is a variable or a property.
2771 Expression* function = node->expression();
2772 Variable* var = function->AsVariableProxy()->AsVariable();
2773 Property* property = function->AsProperty();
2774
2775 // ------------------------------------------------------------------------
2776 // Fast-case: Use inline caching.
2777 // ---
2778 // According to ECMA-262, section 11.2.3, page 44, the function to call
2779 // must be resolved after the arguments have been evaluated. The IC code
2780 // automatically handles this by loading the arguments before the function
2781 // is resolved in cache misses (this also holds for megamorphic calls).
2782 // ------------------------------------------------------------------------
2783
2784 if (var != NULL && var->is_possibly_eval()) {
2785 // ----------------------------------
2786 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
2787 // ----------------------------------
2788
2789 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2790 // resolve the function we need to call and the receiver of the
2791 // call. Then we call the resolved function using the given
2792 // arguments.
2793
2794 // Prepare the stack for the call to the resolved function.
2795 Load(function);
2796
2797 // Allocate a frame slot for the receiver.
2798 frame_->Push(Factory::undefined_value());
2799 int arg_count = args->length();
2800 for (int i = 0; i < arg_count; i++) {
2801 Load(args->at(i));
2802 }
2803
2804 // Prepare the stack for the call to ResolvePossiblyDirectEval.
2805 frame_->PushElementAt(arg_count + 1);
2806 if (arg_count > 0) {
2807 frame_->PushElementAt(arg_count);
2808 } else {
2809 frame_->Push(Factory::undefined_value());
2810 }
2811
Leon Clarkee46be812010-01-19 14:06:41 +00002812 // Push the receiver.
2813 frame_->PushParameterAt(-1);
2814
Steve Blocka7e24c12009-10-30 11:49:00 +00002815 // Resolve the call.
2816 Result result =
Leon Clarkee46be812010-01-19 14:06:41 +00002817 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00002818
Leon Clarkee46be812010-01-19 14:06:41 +00002819 // The runtime call returns a pair of values in rax (function) and
2820 // rdx (receiver). Touch up the stack with the right values.
2821 Result receiver = allocator_->Allocate(rdx);
2822 frame_->SetElementAt(arg_count + 1, &result);
2823 frame_->SetElementAt(arg_count, &receiver);
2824 receiver.Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00002825
2826 // Call the function.
2827 CodeForSourcePosition(node->position());
2828 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00002829 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00002830 result = frame_->CallStub(&call_function, arg_count + 1);
2831
2832 // Restore the context and overwrite the function on the stack with
2833 // the result.
2834 frame_->RestoreContextRegister();
2835 frame_->SetElementAt(0, &result);
2836
2837 } else if (var != NULL && !var->is_this() && var->is_global()) {
2838 // ----------------------------------
2839 // JavaScript example: 'foo(1, 2, 3)' // foo is global
2840 // ----------------------------------
2841
Steve Blocka7e24c12009-10-30 11:49:00 +00002842 // Pass the global object as the receiver and let the IC stub
2843 // patch the stack to use the global proxy as 'this' in the
2844 // invoked function.
2845 LoadGlobal();
2846
2847 // Load the arguments.
2848 int arg_count = args->length();
2849 for (int i = 0; i < arg_count; i++) {
2850 Load(args->at(i));
2851 }
2852
Andrei Popescu402d9372010-02-26 13:31:12 +00002853 // Push the name of the function on the frame.
2854 frame_->Push(var->name());
2855
Steve Blocka7e24c12009-10-30 11:49:00 +00002856 // Call the IC initialization code.
2857 CodeForSourcePosition(node->position());
2858 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
2859 arg_count,
2860 loop_nesting());
2861 frame_->RestoreContextRegister();
2862 // Replace the function on the stack with the result.
Andrei Popescu402d9372010-02-26 13:31:12 +00002863 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00002864
2865 } else if (var != NULL && var->slot() != NULL &&
2866 var->slot()->type() == Slot::LOOKUP) {
2867 // ----------------------------------
Kristian Monsen25f61362010-05-21 11:50:48 +01002868 // JavaScript examples:
2869 //
2870 // with (obj) foo(1, 2, 3) // foo may be in obj.
2871 //
2872 // function f() {};
2873 // function g() {
2874 // eval(...);
2875 // f(); // f could be in extension object.
2876 // }
Steve Blocka7e24c12009-10-30 11:49:00 +00002877 // ----------------------------------
2878
Kristian Monsen25f61362010-05-21 11:50:48 +01002879 JumpTarget slow, done;
2880 Result function;
2881
2882 // Generate fast case for loading functions from slots that
2883 // correspond to local/global variables or arguments unless they
2884 // are shadowed by eval-introduced bindings.
2885 EmitDynamicLoadFromSlotFastCase(var->slot(),
2886 NOT_INSIDE_TYPEOF,
2887 &function,
2888 &slow,
2889 &done);
2890
2891 slow.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002892 // Load the function from the context. Sync the frame so we can
2893 // push the arguments directly into place.
2894 frame_->SyncRange(0, frame_->element_count() - 1);
2895 frame_->EmitPush(rsi);
2896 frame_->EmitPush(var->name());
2897 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
2898 // The runtime call returns a pair of values in rax and rdx. The
2899 // looked-up function is in rax and the receiver is in rdx. These
2900 // register references are not ref counted here. We spill them
2901 // eagerly since they are arguments to an inevitable call (and are
2902 // not sharable by the arguments).
2903 ASSERT(!allocator()->is_used(rax));
2904 frame_->EmitPush(rax);
2905
2906 // Load the receiver.
2907 ASSERT(!allocator()->is_used(rdx));
2908 frame_->EmitPush(rdx);
2909
Kristian Monsen25f61362010-05-21 11:50:48 +01002910 // If fast case code has been generated, emit code to push the
2911 // function and receiver and have the slow path jump around this
2912 // code.
2913 if (done.is_linked()) {
2914 JumpTarget call;
2915 call.Jump();
2916 done.Bind(&function);
2917 frame_->Push(&function);
2918 LoadGlobalReceiver();
2919 call.Bind();
2920 }
2921
Steve Blocka7e24c12009-10-30 11:49:00 +00002922 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002923 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002924
2925 } else if (property != NULL) {
2926 // Check if the key is a literal string.
2927 Literal* literal = property->key()->AsLiteral();
2928
2929 if (literal != NULL && literal->handle()->IsSymbol()) {
2930 // ------------------------------------------------------------------
2931 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
2932 // ------------------------------------------------------------------
2933
2934 Handle<String> name = Handle<String>::cast(literal->handle());
2935
2936 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
2937 name->IsEqualTo(CStrVector("apply")) &&
2938 args->length() == 2 &&
2939 args->at(1)->AsVariableProxy() != NULL &&
2940 args->at(1)->AsVariableProxy()->IsArguments()) {
2941 // Use the optimized Function.prototype.apply that avoids
2942 // allocating lazily allocated arguments objects.
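        // This matches calls written as 'f.apply(receiver, arguments)' inside
        // a function whose arguments object has not been materialized.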
Leon Clarked91b9f72010-01-27 17:25:45 +00002943 CallApplyLazy(property->obj(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002944 args->at(0),
2945 args->at(1)->AsVariableProxy(),
2946 node->position());
2947
2948 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00002949 // Push the receiver onto the frame.
Steve Blocka7e24c12009-10-30 11:49:00 +00002950 Load(property->obj());
2951
2952 // Load the arguments.
2953 int arg_count = args->length();
2954 for (int i = 0; i < arg_count; i++) {
2955 Load(args->at(i));
2956 }
2957
Andrei Popescu402d9372010-02-26 13:31:12 +00002958 // Push the name of the function onto the frame.
2959 frame_->Push(name);
2960
Steve Blocka7e24c12009-10-30 11:49:00 +00002961 // Call the IC initialization code.
2962 CodeForSourcePosition(node->position());
2963 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET,
2964 arg_count,
2965 loop_nesting());
2966 frame_->RestoreContextRegister();
Andrei Popescu402d9372010-02-26 13:31:12 +00002967 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00002968 }
2969
2970 } else {
2971 // -------------------------------------------
2972 // JavaScript example: 'array[index](1, 2, 3)'
2973 // -------------------------------------------
2974
2975 // Load the function to call from the property through a reference.
Steve Blocka7e24c12009-10-30 11:49:00 +00002976 if (property->is_synthetic()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00002977 Reference ref(this, property, false);
2978 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00002979 // Use global object as receiver.
2980 LoadGlobalReceiver();
2981 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00002982 Reference ref(this, property, false);
2983 ASSERT(ref.size() == 2);
2984 Result key = frame_->Pop();
2985 frame_->Dup(); // Duplicate the receiver.
2986 frame_->Push(&key);
2987 ref.GetValue();
2988 // Top of frame contains function to call, with duplicate copy of
2989 // receiver below it. Swap them.
2990 Result function = frame_->Pop();
2991 Result receiver = frame_->Pop();
2992 frame_->Push(&function);
2993 frame_->Push(&receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00002994 }
2995
2996 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00002997 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00002998 }
2999
3000 } else {
3001 // ----------------------------------
3002 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
3003 // ----------------------------------
3004
3005 // Load the function.
3006 Load(function);
3007
3008 // Pass the global proxy as the receiver.
3009 LoadGlobalReceiver();
3010
3011 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00003012 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00003013 }
3014}
3015
3016
3017void CodeGenerator::VisitCallNew(CallNew* node) {
3018 Comment cmnt(masm_, "[ CallNew");
3019
3020 // According to ECMA-262, section 11.2.2, page 44, the function
3021 // expression in new calls must be evaluated before the
3022 // arguments. This is different from ordinary calls, where the
3023 // actual function to call is resolved after the arguments have been
3024 // evaluated.
3025
3026 // Compute function to call and use the global object as the
3027 // receiver. There is no need to use the global proxy here because
3028 // it will always be replaced with a newly allocated object.
3029 Load(node->expression());
3030 LoadGlobal();
3031
3032 // Push the arguments ("left-to-right") on the stack.
3033 ZoneList<Expression*>* args = node->arguments();
3034 int arg_count = args->length();
3035 for (int i = 0; i < arg_count; i++) {
3036 Load(args->at(i));
3037 }
3038
3039 // Call the construct call builtin that handles allocation and
3040 // constructor invocation.
3041 CodeForSourcePosition(node->position());
3042 Result result = frame_->CallConstructor(arg_count);
3043 // Replace the function on the stack with the result.
3044 frame_->SetElementAt(0, &result);
3045}
3046
3047
3048void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
3049 if (CheckForInlineRuntimeCall(node)) {
3050 return;
3051 }
3052
3053 ZoneList<Expression*>* args = node->arguments();
3054 Comment cmnt(masm_, "[ CallRuntime");
3055 Runtime::Function* function = node->function();
3056
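  // A NULL function means the name does not refer to a C++ runtime function;
  // the call is instead compiled as a named call on the builtins object (see
  // the CallCallIC below).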
3057 if (function == NULL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003058 // Push the builtins object found in the current global object.
3059 Result temp = allocator()->Allocate();
3060 ASSERT(temp.is_valid());
3061 __ movq(temp.reg(), GlobalObject());
3062 __ movq(temp.reg(),
3063 FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
3064 frame_->Push(&temp);
3065 }
3066
3067 // Push the arguments ("left-to-right").
3068 int arg_count = args->length();
3069 for (int i = 0; i < arg_count; i++) {
3070 Load(args->at(i));
3071 }
3072
3073 if (function == NULL) {
3074 // Call the JS runtime function.
Andrei Popescu402d9372010-02-26 13:31:12 +00003075 frame_->Push(node->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00003076 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
3077 arg_count,
3078 loop_nesting_);
3079 frame_->RestoreContextRegister();
Andrei Popescu402d9372010-02-26 13:31:12 +00003080 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00003081 } else {
3082 // Call the C runtime function.
3083 Result answer = frame_->CallRuntime(function, arg_count);
3084 frame_->Push(&answer);
3085 }
3086}
3087
3088
3089void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003090 Comment cmnt(masm_, "[ UnaryOperation");
3091
3092 Token::Value op = node->op();
3093
3094 if (op == Token::NOT) {
3095 // Swap the true and false targets but keep the same actual label
3096 // as the fall through.
3097 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00003098 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003099 // Swap the labels back.
3100 destination()->Invert();
3101
3102 } else if (op == Token::DELETE) {
3103 Property* property = node->expression()->AsProperty();
3104 if (property != NULL) {
3105 Load(property->obj());
3106 Load(property->key());
3107 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
3108 frame_->Push(&answer);
3109 return;
3110 }
3111
3112 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
3113 if (variable != NULL) {
3114 Slot* slot = variable->slot();
3115 if (variable->is_global()) {
3116 LoadGlobal();
3117 frame_->Push(variable->name());
3118 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
3119 CALL_FUNCTION, 2);
3120 frame_->Push(&answer);
3121 return;
3122
3123 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
3124 // Call the runtime to look up the context holding the named
3125 // variable. Sync the virtual frame eagerly so we can push the
3126 // arguments directly into place.
3127 frame_->SyncRange(0, frame_->element_count() - 1);
3128 frame_->EmitPush(rsi);
3129 frame_->EmitPush(variable->name());
3130 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
3131 ASSERT(context.is_register());
3132 frame_->EmitPush(context.reg());
3133 context.Unuse();
3134 frame_->EmitPush(variable->name());
3135 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
3136 CALL_FUNCTION, 2);
3137 frame_->Push(&answer);
3138 return;
3139 }
3140
3141 // Default: The result of deleting a non-global variable that was not
3142 // dynamically introduced is false.
3143 frame_->Push(Factory::false_value());
3144
3145 } else {
3146 // Default: Result of deleting expressions is true.
3147 Load(node->expression()); // may have side-effects
3148 frame_->SetElementAt(0, Factory::true_value());
3149 }
3150
3151 } else if (op == Token::TYPEOF) {
3152 // Special case for loading the typeof expression; see comment on
3153 // LoadTypeofExpression().
3154 LoadTypeofExpression(node->expression());
3155 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
3156 frame_->Push(&answer);
3157
3158 } else if (op == Token::VOID) {
3159 Expression* expression = node->expression();
3160 if (expression && expression->AsLiteral() && (
3161 expression->AsLiteral()->IsTrue() ||
3162 expression->AsLiteral()->IsFalse() ||
3163 expression->AsLiteral()->handle()->IsNumber() ||
3164 expression->AsLiteral()->handle()->IsString() ||
3165 expression->AsLiteral()->handle()->IsJSRegExp() ||
3166 expression->AsLiteral()->IsNull())) {
3167 // Omit evaluating the value of the primitive literal.
3168 // It will be discarded anyway, and can have no side effect.
3169 frame_->Push(Factory::undefined_value());
3170 } else {
3171 Load(node->expression());
3172 frame_->SetElementAt(0, Factory::undefined_value());
3173 }
3174
3175 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00003176 bool overwrite =
3177 (node->expression()->AsBinaryOperation() != NULL &&
3178 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
Steve Blocka7e24c12009-10-30 11:49:00 +00003179 Load(node->expression());
3180 switch (op) {
3181 case Token::NOT:
3182 case Token::DELETE:
3183 case Token::TYPEOF:
3184 UNREACHABLE(); // handled above
3185 break;
3186
3187 case Token::SUB: {
Leon Clarkee46be812010-01-19 14:06:41 +00003188 GenericUnaryOpStub stub(Token::SUB, overwrite);
Steve Blocka7e24c12009-10-30 11:49:00 +00003189 Result operand = frame_->Pop();
3190 Result answer = frame_->CallStub(&stub, &operand);
Steve Block6ded16b2010-05-10 14:33:55 +01003191 answer.set_type_info(TypeInfo::Number());
Steve Blocka7e24c12009-10-30 11:49:00 +00003192 frame_->Push(&answer);
3193 break;
3194 }
3195
3196 case Token::BIT_NOT: {
3197 // Smi check.
3198 JumpTarget smi_label;
3199 JumpTarget continue_label;
3200 Result operand = frame_->Pop();
3201 operand.ToRegister();
3202
3203 Condition is_smi = masm_->CheckSmi(operand.reg());
3204 smi_label.Branch(is_smi, &operand);
3205
Leon Clarked91b9f72010-01-27 17:25:45 +00003206 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
3207 Result answer = frame_->CallStub(&stub, &operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00003208 continue_label.Jump(&answer);
Leon Clarked91b9f72010-01-27 17:25:45 +00003209
Steve Blocka7e24c12009-10-30 11:49:00 +00003210 smi_label.Bind(&answer);
3211 answer.ToRegister();
3212 frame_->Spill(answer.reg());
3213 __ SmiNot(answer.reg(), answer.reg());
3214 continue_label.Bind(&answer);
Steve Block6ded16b2010-05-10 14:33:55 +01003215 answer.set_type_info(TypeInfo::Smi());
Steve Blocka7e24c12009-10-30 11:49:00 +00003216 frame_->Push(&answer);
3217 break;
3218 }
3219
3220 case Token::ADD: {
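        // Unary '+' is ToNumber: a smi is left unchanged, anything else is
        // converted by the TO_NUMBER builtin below.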
3221 // Smi check.
3222 JumpTarget continue_label;
3223 Result operand = frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01003224 TypeInfo operand_info = operand.type_info();
Steve Blocka7e24c12009-10-30 11:49:00 +00003225 operand.ToRegister();
3226 Condition is_smi = masm_->CheckSmi(operand.reg());
3227 continue_label.Branch(is_smi, &operand);
3228 frame_->Push(&operand);
3229 Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
3230 CALL_FUNCTION, 1);
3231
3232 continue_label.Bind(&answer);
Steve Block6ded16b2010-05-10 14:33:55 +01003233 if (operand_info.IsSmi()) {
3234 answer.set_type_info(TypeInfo::Smi());
3235 } else if (operand_info.IsInteger32()) {
3236 answer.set_type_info(TypeInfo::Integer32());
3237 } else {
3238 answer.set_type_info(TypeInfo::Number());
3239 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003240 frame_->Push(&answer);
3241 break;
3242 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003243 default:
3244 UNREACHABLE();
3245 }
3246 }
3247}
3248
3249
Steve Block6ded16b2010-05-10 14:33:55 +01003250// The value in dst was optimistically incremented or decremented.
3251// The result overflowed or was not smi tagged. Call into the runtime
3252// to convert the argument to a number, and call the specialized add
3253// or subtract stub. The result is left in dst.
Steve Blocka7e24c12009-10-30 11:49:00 +00003254class DeferredPrefixCountOperation: public DeferredCode {
3255 public:
Steve Block6ded16b2010-05-10 14:33:55 +01003256 DeferredPrefixCountOperation(Register dst,
3257 bool is_increment,
3258 TypeInfo input_type)
3259 : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003260 set_comment("[ DeferredCountOperation");
3261 }
3262
3263 virtual void Generate();
3264
3265 private:
3266 Register dst_;
3267 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01003268 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003269};
3270
3271
3272void DeferredPrefixCountOperation::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01003273 Register left;
3274 if (input_type_.IsNumber()) {
3275 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003276 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003277 __ push(dst_);
3278 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3279 left = rax;
Steve Blocka7e24c12009-10-30 11:49:00 +00003280 }
Steve Block6ded16b2010-05-10 14:33:55 +01003281
3282 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
3283 NO_OVERWRITE,
3284 NO_GENERIC_BINARY_FLAGS,
3285 TypeInfo::Number());
3286 stub.GenerateCall(masm_, left, Smi::FromInt(1));
3287
Steve Blocka7e24c12009-10-30 11:49:00 +00003288 if (!dst_.is(rax)) __ movq(dst_, rax);
3289}
3290
3291
Steve Block6ded16b2010-05-10 14:33:55 +01003292// The value in dst was optimistically incremented or decremented.
3293// The result overflowed or was not smi tagged. Call into the runtime
3294// to convert the argument to a number. Update the original value in
3295// old. Call the specialized add or subtract stub. The result is
3296// left in dst.
Steve Blocka7e24c12009-10-30 11:49:00 +00003297class DeferredPostfixCountOperation: public DeferredCode {
3298 public:
Steve Block6ded16b2010-05-10 14:33:55 +01003299 DeferredPostfixCountOperation(Register dst,
3300 Register old,
3301 bool is_increment,
3302 TypeInfo input_type)
3303 : dst_(dst),
3304 old_(old),
3305 is_increment_(is_increment),
3306 input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003307 set_comment("[ DeferredCountOperation");
3308 }
3309
3310 virtual void Generate();
3311
3312 private:
3313 Register dst_;
3314 Register old_;
3315 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01003316 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003317};
3318
3319
3320void DeferredPostfixCountOperation::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01003321 Register left;
3322 if (input_type_.IsNumber()) {
3323 __ push(dst_); // Save the input to use as the old value.
3324 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00003325 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003326 __ push(dst_);
3327 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
3328 __ push(rax); // Save the result of ToNumber to use as the old value.
3329 left = rax;
Steve Blocka7e24c12009-10-30 11:49:00 +00003330 }
Steve Block6ded16b2010-05-10 14:33:55 +01003331
3332 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
3333 NO_OVERWRITE,
3334 NO_GENERIC_BINARY_FLAGS,
3335 TypeInfo::Number());
3336 stub.GenerateCall(masm_, left, Smi::FromInt(1));
3337
Steve Blocka7e24c12009-10-30 11:49:00 +00003338 if (!dst_.is(rax)) __ movq(dst_, rax);
3339 __ pop(old_);
3340}
3341
3342
3343void CodeGenerator::VisitCountOperation(CountOperation* node) {
3344 Comment cmnt(masm_, "[ CountOperation");
3345
3346 bool is_postfix = node->is_postfix();
3347 bool is_increment = node->op() == Token::INC;
3348
3349 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
3350 bool is_const = (var != NULL && var->mode() == Variable::CONST);
3351
3352 // Postfix operations need a stack slot under the reference to hold
3353 // the old value while the new value is being stored. This is so that
3354 // in the case that storing the new value requires a call, the old
3355 // value will be in the frame to be spilled.
3356 if (is_postfix) frame_->Push(Smi::FromInt(0));
3357
Leon Clarked91b9f72010-01-27 17:25:45 +00003358 // A constant reference is not saved to, so the reference is not a
3359 // compound assignment reference.
3360 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00003361 if (target.is_illegal()) {
3362 // Spoof the virtual frame to have the expected height (one higher
3363 // than on entry).
3364 if (!is_postfix) frame_->Push(Smi::FromInt(0));
3365 return;
3366 }
Steve Blockd0582a62009-12-15 09:54:21 +00003367 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003368
3369 Result new_value = frame_->Pop();
3370 new_value.ToRegister();
3371
3372 Result old_value; // Only allocated in the postfix case.
3373 if (is_postfix) {
3374 // Allocate a temporary to preserve the old value.
3375 old_value = allocator_->Allocate();
3376 ASSERT(old_value.is_valid());
3377 __ movq(old_value.reg(), new_value.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01003378
3379 // The return value for postfix operations is ToNumber(input).
3380 // Keep more precise type info if the input is some kind of
3381 // number already. If the input is not a number we have to wait
3382 // for the deferred code to convert it.
3383 if (new_value.type_info().IsNumber()) {
3384 old_value.set_type_info(new_value.type_info());
3385 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003386 }
3387 // Ensure the new value is writable.
3388 frame_->Spill(new_value.reg());
3389
3390 DeferredCode* deferred = NULL;
3391 if (is_postfix) {
3392 deferred = new DeferredPostfixCountOperation(new_value.reg(),
3393 old_value.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01003394 is_increment,
3395 new_value.type_info());
Steve Blocka7e24c12009-10-30 11:49:00 +00003396 } else {
3397 deferred = new DeferredPrefixCountOperation(new_value.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01003398 is_increment,
3399 new_value.type_info());
Steve Blocka7e24c12009-10-30 11:49:00 +00003400 }
3401
Steve Block3ce2e202009-11-05 08:53:23 +00003402 __ JumpIfNotSmi(new_value.reg(), deferred->entry_label());
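    // Optimistically add or subtract one as a smi into kScratchRegister.
    // On overflow the helper jumps to the deferred code, which converts the
    // input to a number and redoes the operation in the generic stub.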
Steve Blocka7e24c12009-10-30 11:49:00 +00003403 if (is_increment) {
Steve Block3ce2e202009-11-05 08:53:23 +00003404 __ SmiAddConstant(kScratchRegister,
3405 new_value.reg(),
3406 Smi::FromInt(1),
3407 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003408 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00003409 __ SmiSubConstant(kScratchRegister,
3410 new_value.reg(),
3411 Smi::FromInt(1),
3412 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00003413 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003414 __ movq(new_value.reg(), kScratchRegister);
3415 deferred->BindExit();
3416
Steve Block6ded16b2010-05-10 14:33:55 +01003417 // Postfix count operations return their input converted to
3418 // number. The case when the input is already a number is covered
3419 // above in the allocation code for old_value.
3420 if (is_postfix && !new_value.type_info().IsNumber()) {
3421 old_value.set_type_info(TypeInfo::Number());
3422 }
3423
3424 new_value.set_type_info(TypeInfo::Number());
3425
Steve Blocka7e24c12009-10-30 11:49:00 +00003426 // Postfix: store the old value in the allocated slot under the
3427 // reference.
3428 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
3429
3430 frame_->Push(&new_value);
3431 // Non-constant: update the reference.
3432 if (!is_const) target.SetValue(NOT_CONST_INIT);
3433 }
3434
3435 // Postfix: drop the new value and use the old.
3436 if (is_postfix) frame_->Drop();
3437}
3438
3439
Steve Block6ded16b2010-05-10 14:33:55 +01003440void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003441 // According to ECMA-262 section 11.11, page 58, the binary logical
3442 // operators must yield the result of one of the two expressions
3443 // before any ToBoolean() conversions. This means that the value
3444 // produced by a && or || operator is not necessarily a boolean.
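  // For example, '0 || "hi"' yields the string "hi", and 'x && y' yields x
  // when x is falsy; in neither case is the result converted to a boolean.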
3445
3446 // NOTE: If the left hand side produces a materialized value (not
3447 // control flow), we force the right hand side to do the same. This
3448 // is necessary because we assume that if we get control flow on the
3449 // last path out of an expression we got it on all paths.
Steve Block6ded16b2010-05-10 14:33:55 +01003450 if (node->op() == Token::AND) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003451 JumpTarget is_true;
3452 ControlDestination dest(&is_true, destination()->false_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00003453 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003454
3455 if (dest.false_was_fall_through()) {
3456 // The current false target was used as the fall-through. If
3457 // there are no dangling jumps to is_true then the left
3458 // subexpression was unconditionally false. Otherwise we have
3459 // paths where we do have to evaluate the right subexpression.
3460 if (is_true.is_linked()) {
3461 // We need to compile the right subexpression. If the jump to
3462 // the current false target was a forward jump then we have a
3463 // valid frame, we have just bound the false target, and we
3464 // have to jump around the code for the right subexpression.
3465 if (has_valid_frame()) {
3466 destination()->false_target()->Unuse();
3467 destination()->false_target()->Jump();
3468 }
3469 is_true.Bind();
3470 // The left subexpression compiled to control flow, so the
3471 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003472 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003473 } else {
3474 // We have actually just jumped to or bound the current false
3475 // target but the current control destination is not marked as
3476 // used.
3477 destination()->Use(false);
3478 }
3479
3480 } else if (dest.is_used()) {
3481 // The left subexpression compiled to control flow (and is_true
3482 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003483 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003484
3485 } else {
3486 // We have a materialized value on the frame, so we exit with
3487 // one on all paths. There are possibly also jumps to is_true
3488 // from nested subexpressions.
3489 JumpTarget pop_and_continue;
3490 JumpTarget exit;
3491
3492 // Avoid popping the result if it converts to 'false' using the
3493 // standard ToBoolean() conversion as described in ECMA-262,
3494 // section 9.2, page 30.
3495 //
3496 // Duplicate the TOS value. The duplicate will be popped by
3497 // ToBoolean.
3498 frame_->Dup();
3499 ControlDestination dest(&pop_and_continue, &exit, true);
3500 ToBoolean(&dest);
3501
3502 // Pop the result of evaluating the first part.
3503 frame_->Drop();
3504
3505 // Compile right side expression.
3506 is_true.Bind();
3507 Load(node->right());
3508
3509 // Exit (always with a materialized value).
3510 exit.Bind();
3511 }
3512
Steve Block6ded16b2010-05-10 14:33:55 +01003513 } else {
3514 ASSERT(node->op() == Token::OR);
Steve Blocka7e24c12009-10-30 11:49:00 +00003515 JumpTarget is_false;
3516 ControlDestination dest(destination()->true_target(), &is_false, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003517 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003518
3519 if (dest.true_was_fall_through()) {
3520 // The current true target was used as the fall-through. If
3521 // there are no dangling jumps to is_false then the left
3522 // subexpression was unconditionally true. Otherwise we have
3523 // paths where we do have to evaluate the right subexpression.
3524 if (is_false.is_linked()) {
3525 // We need to compile the right subexpression. If the jump to
3526 // the current true target was a forward jump then we have a
3527 // valid frame, we have just bound the true target, and we
3528 // have to jump around the code for the right subexpression.
3529 if (has_valid_frame()) {
3530 destination()->true_target()->Unuse();
3531 destination()->true_target()->Jump();
3532 }
3533 is_false.Bind();
3534 // The left subexpression compiled to control flow, so the
3535 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003536 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003537 } else {
3538 // We have just jumped to or bound the current true target but
3539 // the current control destination is not marked as used.
3540 destination()->Use(true);
3541 }
3542
3543 } else if (dest.is_used()) {
3544 // The left subexpression compiled to control flow (and is_false
3545 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00003546 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003547
3548 } else {
3549 // We have a materialized value on the frame, so we exit with
3550 // one on all paths. There are possibly also jumps to is_false
3551 // from nested subexpressions.
3552 JumpTarget pop_and_continue;
3553 JumpTarget exit;
3554
3555 // Avoid popping the result if it converts to 'true' using the
3556 // standard ToBoolean() conversion as described in ECMA-262,
3557 // section 9.2, page 30.
3558 //
3559 // Duplicate the TOS value. The duplicate will be popped by
3560 // ToBoolean.
3561 frame_->Dup();
3562 ControlDestination dest(&exit, &pop_and_continue, false);
3563 ToBoolean(&dest);
3564
3565 // Pop the result of evaluating the first part.
3566 frame_->Drop();
3567
3568 // Compile right side expression.
3569 is_false.Bind();
3570 Load(node->right());
3571
3572 // Exit (always with a materialized value).
3573 exit.Bind();
3574 }
Steve Block6ded16b2010-05-10 14:33:55 +01003575 }
3576}
Steve Blocka7e24c12009-10-30 11:49:00 +00003577
Steve Block6ded16b2010-05-10 14:33:55 +01003578void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
3579 Comment cmnt(masm_, "[ BinaryOperation");
3580
3581 if (node->op() == Token::AND || node->op() == Token::OR) {
3582 GenerateLogicalBooleanOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00003583 } else {
3584 // NOTE: The code below assumes that the slow cases (calls to runtime)
3585 // never return a constant/immutable object.
3586 OverwriteMode overwrite_mode = NO_OVERWRITE;
3587 if (node->left()->AsBinaryOperation() != NULL &&
3588 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
3589 overwrite_mode = OVERWRITE_LEFT;
3590 } else if (node->right()->AsBinaryOperation() != NULL &&
3591 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
3592 overwrite_mode = OVERWRITE_RIGHT;
3593 }
3594
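    // When the left operand is trivial (a literal or similar side-effect-free
    // expression), the right operand can safely be loaded first and the left
    // pushed directly below it afterwards.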
Steve Block6ded16b2010-05-10 14:33:55 +01003595 if (node->left()->IsTrivial()) {
3596 Load(node->right());
3597 Result right = frame_->Pop();
3598 frame_->Push(node->left());
3599 frame_->Push(&right);
3600 } else {
3601 Load(node->left());
3602 Load(node->right());
3603 }
3604 GenericBinaryOperation(node, overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00003605 }
3606}
3607
3608
3609
3610void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
3611 Comment cmnt(masm_, "[ CompareOperation");
3612
3613 // Get the expressions from the node.
3614 Expression* left = node->left();
3615 Expression* right = node->right();
3616 Token::Value op = node->op();
3617 // To make typeof testing for natives implemented in JavaScript really
3618 // efficient, we generate special code for expressions of the form:
3619 // 'typeof <expression> == <string>'.
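  // For example, 'typeof x == "number"' compiles to a smi check plus a map
  // check on x, without ever materializing the type name string.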
3620 UnaryOperation* operation = left->AsUnaryOperation();
3621 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
3622 (operation != NULL && operation->op() == Token::TYPEOF) &&
3623 (right->AsLiteral() != NULL &&
3624 right->AsLiteral()->handle()->IsString())) {
3625 Handle<String> check(Handle<String>::cast(right->AsLiteral()->handle()));
3626
3627 // Load the operand and move it to a register.
3628 LoadTypeofExpression(operation->expression());
3629 Result answer = frame_->Pop();
3630 answer.ToRegister();
3631
3632 if (check->Equals(Heap::number_symbol())) {
3633 Condition is_smi = masm_->CheckSmi(answer.reg());
3634 destination()->true_target()->Branch(is_smi);
3635 frame_->Spill(answer.reg());
3636 __ movq(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
3637 __ CompareRoot(answer.reg(), Heap::kHeapNumberMapRootIndex);
3638 answer.Unuse();
3639 destination()->Split(equal);
3640
3641 } else if (check->Equals(Heap::string_symbol())) {
3642 Condition is_smi = masm_->CheckSmi(answer.reg());
3643 destination()->false_target()->Branch(is_smi);
3644
3645 // It can be an undetectable string object.
3646 __ movq(kScratchRegister,
3647 FieldOperand(answer.reg(), HeapObject::kMapOffset));
3648 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3649 Immediate(1 << Map::kIsUndetectable));
3650 destination()->false_target()->Branch(not_zero);
3651 __ CmpInstanceType(kScratchRegister, FIRST_NONSTRING_TYPE);
3652 answer.Unuse();
3653 destination()->Split(below); // Unsigned byte comparison needed.
3654
3655 } else if (check->Equals(Heap::boolean_symbol())) {
3656 __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
3657 destination()->true_target()->Branch(equal);
3658 __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
3659 answer.Unuse();
3660 destination()->Split(equal);
3661
3662 } else if (check->Equals(Heap::undefined_symbol())) {
3663 __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
3664 destination()->true_target()->Branch(equal);
3665
3666 Condition is_smi = masm_->CheckSmi(answer.reg());
3667 destination()->false_target()->Branch(is_smi);
3668
3669 // It can be an undetectable object.
3670 __ movq(kScratchRegister,
3671 FieldOperand(answer.reg(), HeapObject::kMapOffset));
3672 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3673 Immediate(1 << Map::kIsUndetectable));
3674 answer.Unuse();
3675 destination()->Split(not_zero);
3676
3677 } else if (check->Equals(Heap::function_symbol())) {
3678 Condition is_smi = masm_->CheckSmi(answer.reg());
3679 destination()->false_target()->Branch(is_smi);
3680 frame_->Spill(answer.reg());
3681 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
Steve Blockd0582a62009-12-15 09:54:21 +00003682 destination()->true_target()->Branch(equal);
3683 // Regular expressions are callable so typeof == 'function'.
3684 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00003685 answer.Unuse();
3686 destination()->Split(equal);
3687
3688 } else if (check->Equals(Heap::object_symbol())) {
3689 Condition is_smi = masm_->CheckSmi(answer.reg());
3690 destination()->false_target()->Branch(is_smi);
3691 __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
3692 destination()->true_target()->Branch(equal);
3693
Steve Blockd0582a62009-12-15 09:54:21 +00003694 // Regular expressions are typeof == 'function', not 'object'.
3695 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, kScratchRegister);
3696 destination()->false_target()->Branch(equal);
3697
Steve Blocka7e24c12009-10-30 11:49:00 +00003698 // It can be an undetectable object.
Steve Blocka7e24c12009-10-30 11:49:00 +00003699 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3700 Immediate(1 << Map::kIsUndetectable));
3701 destination()->false_target()->Branch(not_zero);
3702 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
3703 destination()->false_target()->Branch(below);
3704 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3705 answer.Unuse();
3706 destination()->Split(below_equal);
3707 } else {
3708 // Uncommon case: typeof testing against a string literal that is
3709 // never returned from the typeof operator.
3710 answer.Unuse();
3711 destination()->Goto(false);
3712 }
3713 return;
3714 }
3715
3716 Condition cc = no_condition;
3717 bool strict = false;
3718 switch (op) {
3719 case Token::EQ_STRICT:
3720 strict = true;
3721 // Fall through
3722 case Token::EQ:
3723 cc = equal;
3724 break;
3725 case Token::LT:
3726 cc = less;
3727 break;
3728 case Token::GT:
3729 cc = greater;
3730 break;
3731 case Token::LTE:
3732 cc = less_equal;
3733 break;
3734 case Token::GTE:
3735 cc = greater_equal;
3736 break;
3737 case Token::IN: {
3738 Load(left);
3739 Load(right);
3740 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
3741 frame_->Push(&answer); // push the result
3742 return;
3743 }
3744 case Token::INSTANCEOF: {
3745 Load(left);
3746 Load(right);
3747 InstanceofStub stub;
3748 Result answer = frame_->CallStub(&stub, 2);
3749 answer.ToRegister();
3750 __ testq(answer.reg(), answer.reg());
3751 answer.Unuse();
3752 destination()->Split(zero);
3753 return;
3754 }
3755 default:
3756 UNREACHABLE();
3757 }
3758 Load(left);
3759 Load(right);
Andrei Popescu402d9372010-02-26 13:31:12 +00003760 Comparison(node, cc, strict, destination());
Steve Blocka7e24c12009-10-30 11:49:00 +00003761}
3762
3763
3764void CodeGenerator::VisitThisFunction(ThisFunction* node) {
3765 frame_->PushFunction();
3766}
3767
3768
Steve Block6ded16b2010-05-10 14:33:55 +01003769void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003770 ASSERT(args->length() == 1);
3771
3772 // ArgumentsAccessStub expects the key in rdx and the formal
3773 // parameter count in rax.
3774 Load(args->at(0));
3775 Result key = frame_->Pop();
3776 // Explicitly create a constant result.
Andrei Popescu31002712010-02-23 13:46:05 +00003777 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00003778 // Call the shared stub to get to arguments[key].
3779 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
3780 Result result = frame_->CallStub(&stub, &key, &count);
3781 frame_->Push(&result);
3782}
3783
3784
3785void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
3786 ASSERT(args->length() == 1);
3787 Load(args->at(0));
3788 Result value = frame_->Pop();
3789 value.ToRegister();
3790 ASSERT(value.is_valid());
3791 Condition is_smi = masm_->CheckSmi(value.reg());
3792 destination()->false_target()->Branch(is_smi);
3793 // It is a heap object - get map.
3794 // Check if the object is a JS array or not.
3795 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, kScratchRegister);
3796 value.Unuse();
3797 destination()->Split(equal);
3798}
3799
3800
Andrei Popescu402d9372010-02-26 13:31:12 +00003801void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
3802 ASSERT(args->length() == 1);
3803 Load(args->at(0));
3804 Result value = frame_->Pop();
3805 value.ToRegister();
3806 ASSERT(value.is_valid());
3807 Condition is_smi = masm_->CheckSmi(value.reg());
3808 destination()->false_target()->Branch(is_smi);
3809 // It is a heap object - get map.
3810 // Check if the object is a regexp.
3811 __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, kScratchRegister);
3812 value.Unuse();
3813 destination()->Split(equal);
3814}
3815
3816
Steve Blockd0582a62009-12-15 09:54:21 +00003817void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
3818 // This generates a fast version of:
3819 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
3820 ASSERT(args->length() == 1);
3821 Load(args->at(0));
3822 Result obj = frame_->Pop();
3823 obj.ToRegister();
3824 Condition is_smi = masm_->CheckSmi(obj.reg());
3825 destination()->false_target()->Branch(is_smi);
3826
3827 __ Move(kScratchRegister, Factory::null_value());
3828 __ cmpq(obj.reg(), kScratchRegister);
3829 destination()->true_target()->Branch(equal);
3830
3831 __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset));
3832 // Undetectable objects behave like undefined when tested with typeof.
3833 __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
3834 Immediate(1 << Map::kIsUndetectable));
3835 destination()->false_target()->Branch(not_zero);
3836 __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
3837 destination()->false_target()->Branch(less);
3838 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
3839 obj.Unuse();
3840 destination()->Split(less_equal);
3841}
3842
3843
3844void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
3845 // This generates a fast version of:
3846 // (%_ClassOf(arg) === 'Function')
3847 ASSERT(args->length() == 1);
3848 Load(args->at(0));
3849 Result obj = frame_->Pop();
3850 obj.ToRegister();
3851 Condition is_smi = masm_->CheckSmi(obj.reg());
3852 destination()->false_target()->Branch(is_smi);
3853 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister);
3854 obj.Unuse();
3855 destination()->Split(equal);
3856}
3857
3858
Leon Clarked91b9f72010-01-27 17:25:45 +00003859void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
3860 ASSERT(args->length() == 1);
3861 Load(args->at(0));
3862 Result obj = frame_->Pop();
3863 obj.ToRegister();
3864 Condition is_smi = masm_->CheckSmi(obj.reg());
3865 destination()->false_target()->Branch(is_smi);
3866 __ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset));
3867 __ movzxbl(kScratchRegister,
3868 FieldOperand(kScratchRegister, Map::kBitFieldOffset));
3869 __ testl(kScratchRegister, Immediate(1 << Map::kIsUndetectable));
3870 obj.Unuse();
3871 destination()->Split(not_zero);
3872}
3873
3874
Steve Blocka7e24c12009-10-30 11:49:00 +00003875void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
3876 ASSERT(args->length() == 0);
3877
3878 // Get the frame pointer for the calling frame.
3879 Result fp = allocator()->Allocate();
3880 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3881
3882 // Skip the arguments adaptor frame if it exists.
3883 Label check_frame_marker;
Steve Block3ce2e202009-11-05 08:53:23 +00003884 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
3885 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00003886 __ j(not_equal, &check_frame_marker);
3887 __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
3888
3889 // Check the marker in the calling frame.
3890 __ bind(&check_frame_marker);
Steve Block3ce2e202009-11-05 08:53:23 +00003891 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
3892 Smi::FromInt(StackFrame::CONSTRUCT));
Steve Blocka7e24c12009-10-30 11:49:00 +00003893 fp.Unuse();
3894 destination()->Split(equal);
3895}
3896
3897
3898void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
3899 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01003900
3901 Result fp = allocator_->Allocate();
3902 Result result = allocator_->Allocate();
3903 ASSERT(fp.is_valid() && result.is_valid());
3904
3905 Label exit;
3906
3907 // Get the number of formal parameters.
3908 __ Move(result.reg(), Smi::FromInt(scope()->num_parameters()));
3909
3910 // Check if the calling frame is an arguments adaptor frame.
3911 __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
3912 __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
3913 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
3914 __ j(not_equal, &exit);
3915
3916 // Arguments adaptor case: Read the arguments length from the
3917 // adaptor frame.
3918 __ movq(result.reg(),
3919 Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));
3920
3921 __ bind(&exit);
3922 result.set_type_info(TypeInfo::Smi());
3923 if (FLAG_debug_code) {
3924 __ AbortIfNotSmi(result.reg(), "Computed arguments.length is not a smi.");
3925 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003926 frame_->Push(&result);
3927}
3928
3929
3930void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
3931 Comment(masm_, "[ GenerateFastCharCodeAt");
3932 ASSERT(args->length() == 2);
3933
Steve Blocka7e24c12009-10-30 11:49:00 +00003934 Load(args->at(0));
3935 Load(args->at(1));
3936 Result index = frame_->Pop();
3937 Result object = frame_->Pop();
3938
Steve Blocka7e24c12009-10-30 11:49:00 +00003939 // We will mutate the index register and possibly the object register.
3940 // The case where they are somehow the same register is handled
3941 // because we only mutate them in the case where the receiver is a
3942 // heap object and the index is not.
3943 object.ToRegister();
3944 index.ToRegister();
3945 frame_->Spill(object.reg());
3946 frame_->Spill(index.reg());
3947
Steve Block6ded16b2010-05-10 14:33:55 +01003948 // We need two extra registers.
3949 Result result = allocator()->Allocate();
3950 ASSERT(result.is_valid());
3951 Result scratch = allocator()->Allocate();
3952 ASSERT(scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00003953
3954 // There is no virtual frame effect from here up to the final result
3955 // push.
Steve Block6ded16b2010-05-10 14:33:55 +01003956 Label slow_case;
3957 Label exit;
3958 StringHelper::GenerateFastCharCodeAt(masm_,
3959 object.reg(),
3960 index.reg(),
3961 scratch.reg(),
3962 result.reg(),
3963 &slow_case,
3964 &slow_case,
3965 &slow_case,
3966 &slow_case);
3967 __ jmp(&exit);
Steve Blocka7e24c12009-10-30 11:49:00 +00003968
3969 __ bind(&slow_case);
3970 // Move the undefined value into the result register, which will
3971 // trigger the slow case.
Steve Block6ded16b2010-05-10 14:33:55 +01003972 __ LoadRoot(result.reg(), Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00003973
Steve Block6ded16b2010-05-10 14:33:55 +01003974 __ bind(&exit);
3975 frame_->Push(&result);
3976}
3977
3978
3979void CodeGenerator::GenerateCharFromCode(ZoneList<Expression*>* args) {
3980 Comment(masm_, "[ GenerateCharFromCode");
3981 ASSERT(args->length() == 1);
3982
3983 Load(args->at(0));
3984
3985 Result code = frame_->Pop();
3986 code.ToRegister();
3987 ASSERT(code.is_valid());
3988
3989 // StringHelper::GenerateCharFromCode may do a runtime call.
3990 frame_->SpillAll();
3991
3992 Result result = allocator()->Allocate();
3993 ASSERT(result.is_valid());
3994 Result scratch = allocator()->Allocate();
3995 ASSERT(scratch.is_valid());
3996
3997 StringHelper::GenerateCharFromCode(masm_,
3998 code.reg(),
3999 result.reg(),
4000 scratch.reg(),
4001 CALL_FUNCTION);
4002 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004003}
4004
4005
4006void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
4007 ASSERT(args->length() == 1);
4008 Load(args->at(0));
4009 Result value = frame_->Pop();
4010 value.ToRegister();
4011 ASSERT(value.is_valid());
4012 Condition positive_smi = masm_->CheckPositiveSmi(value.reg());
4013 value.Unuse();
4014 destination()->Split(positive_smi);
4015}
4016
4017
Steve Block6ded16b2010-05-10 14:33:55 +01004018// Generates the Math.pow method. Only handles special cases and
4019// branches to the runtime system for everything else. Please note
4020// that this function assumes that the callsite has executed ToNumber
4021// on both arguments.
4022void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
4023 ASSERT(args->length() == 2);
4024 Load(args->at(0));
4025 Load(args->at(1));
4026
4027 Label allocate_return;
4028 // Load the two operands while leaving the values on the frame.
4029 frame()->Dup();
4030 Result exponent = frame()->Pop();
4031 exponent.ToRegister();
4032 frame()->Spill(exponent.reg());
4033 frame()->PushElementAt(1);
4034 Result base = frame()->Pop();
4035 base.ToRegister();
4036 frame()->Spill(base.reg());
4037
4038 Result answer = allocator()->Allocate();
4039 ASSERT(answer.is_valid());
4040 ASSERT(!exponent.reg().is(base.reg()));
4041 JumpTarget call_runtime;
4042
4043 // Save 1 in xmm3 - we need this several times later on.
4044 __ movl(answer.reg(), Immediate(1));
4045 __ cvtlsi2sd(xmm3, answer.reg());
4046
4047 Label exponent_nonsmi;
4048 Label base_nonsmi;
4049 // If the exponent is a heap number go to that specific case.
4050 __ JumpIfNotSmi(exponent.reg(), &exponent_nonsmi);
4051 __ JumpIfNotSmi(base.reg(), &base_nonsmi);
4052
4053 // Optimized version when y is an integer.
4054 Label powi;
4055 __ SmiToInteger32(base.reg(), base.reg());
4056 __ cvtlsi2sd(xmm0, base.reg());
4057 __ jmp(&powi);
4058 // exponent is smi and base is a heapnumber.
4059 __ bind(&base_nonsmi);
4060 __ CompareRoot(FieldOperand(base.reg(), HeapObject::kMapOffset),
4061 Heap::kHeapNumberMapRootIndex);
4062 call_runtime.Branch(not_equal);
4063
4064 __ movsd(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
4065
4066 // Optimized version of pow if y is an integer.
4067 __ bind(&powi);
4068 __ SmiToInteger32(exponent.reg(), exponent.reg());
4069
4070 // Save exponent in base as we need to check if exponent is negative later.
4071 // We know that base and exponent are in different registers.
4072 __ movl(base.reg(), exponent.reg());
4073
4074 // Get absolute value of exponent.
4075 Label no_neg;
4076 __ cmpl(exponent.reg(), Immediate(0));
4077 __ j(greater_equal, &no_neg);
4078 __ negl(exponent.reg());
4079 __ bind(&no_neg);
4080
4081 // Load xmm1 with 1.
4082 __ movsd(xmm1, xmm3);
4083 Label while_true;
4084 Label no_multiply;
4085
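  // Exponentiation by squaring: each iteration shifts the lowest bit out of
  // the exponent; if it was set, the accumulator in xmm1 is multiplied by the
  // current power of the base in xmm0, which is then squared. Roughly:
  //   result = 1; while (e != 0) { if (e & 1) result *= x; x *= x; e >>= 1; }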
4086 __ bind(&while_true);
4087 __ shrl(exponent.reg(), Immediate(1));
4088 __ j(not_carry, &no_multiply);
4089 __ mulsd(xmm1, xmm0);
4090 __ bind(&no_multiply);
4091 __ testl(exponent.reg(), exponent.reg());
4092 __ mulsd(xmm0, xmm0);
4093 __ j(not_zero, &while_true);
4094
4095 // base.reg() holds the original exponent - if it was negative, return 1/result.
4096 __ testl(base.reg(), base.reg());
4097 __ j(positive, &allocate_return);
4098 // Special case if xmm1 has reached infinity.
4099 __ movl(answer.reg(), Immediate(0x7FB00000));
4100 __ movd(xmm0, answer.reg());
4101 __ cvtss2sd(xmm0, xmm0);
4102 __ ucomisd(xmm0, xmm1);
4103 call_runtime.Branch(equal);
4104 __ divsd(xmm3, xmm1);
4105 __ movsd(xmm1, xmm3);
4106 __ jmp(&allocate_return);
4107
4108 // The exponent (or both operands) is a heap number - from here on we
4109 // work on doubles.
4110 __ bind(&exponent_nonsmi);
4111 __ CompareRoot(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
4112 Heap::kHeapNumberMapRootIndex);
4113 call_runtime.Branch(not_equal);
4114 __ movsd(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
4115 // Test if exponent is nan.
4116 __ ucomisd(xmm1, xmm1);
4117 call_runtime.Branch(parity_even);
4118
4119 Label base_not_smi;
4120 Label handle_special_cases;
4121 __ JumpIfNotSmi(base.reg(), &base_not_smi);
4122 __ SmiToInteger32(base.reg(), base.reg());
4123 __ cvtlsi2sd(xmm0, base.reg());
4124 __ jmp(&handle_special_cases);
4125 __ bind(&base_not_smi);
4126 __ CompareRoot(FieldOperand(base.reg(), HeapObject::kMapOffset),
4127 Heap::kHeapNumberMapRootIndex);
4128 call_runtime.Branch(not_equal);
4129 __ movl(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
4130 __ andl(answer.reg(), Immediate(HeapNumber::kExponentMask));
4131 __ cmpl(answer.reg(), Immediate(HeapNumber::kExponentMask));
4132 // base is NaN or +/-Infinity
4133 call_runtime.Branch(greater_equal);
4134 __ movsd(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
4135
4136 // base is in xmm0 and exponent is in xmm1.
4137 __ bind(&handle_special_cases);
4138 Label not_minus_half;
4139 // Test for -0.5.
4140 // Load xmm2 with -0.5.
4141 __ movl(answer.reg(), Immediate(0xBF000000));
4142 __ movd(xmm2, answer.reg());
4143 __ cvtss2sd(xmm2, xmm2);
4144 // xmm2 now has -0.5.
4145 __ ucomisd(xmm2, xmm1);
4146 __ j(not_equal, &not_minus_half);
4147
4148 // Calculates reciprocal of square root.
4149 // Note that 1/sqrt(x) = sqrt(1/x).
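  // xmm3 still holds 1.0 here, so the divsd leaves 1/x in xmm3, and taking
  // the square root of that yields x^(-0.5).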
4150 __ divsd(xmm3, xmm0);
4151 __ movsd(xmm1, xmm3);
4152 __ sqrtsd(xmm1, xmm1);
4153 __ jmp(&allocate_return);
4154
4155 // Test for 0.5.
4156 __ bind(&not_minus_half);
4157 // Load xmm2 with 0.5.
4158 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
4159 __ addsd(xmm2, xmm3);
4160 // xmm2 now has 0.5.
4161 __ comisd(xmm2, xmm1);
4162 call_runtime.Branch(not_equal);
4163
4164 // Calculates square root.
4165 __ movsd(xmm1, xmm0);
4166 __ sqrtsd(xmm1, xmm1);
4167
4168 JumpTarget done;
4169 Label failure, success;
4170 __ bind(&allocate_return);
4171 // Make a copy of the frame to enable us to handle allocation
4172 // failure after the JumpTarget jump.
4173 VirtualFrame* clone = new VirtualFrame(frame());
4174 __ AllocateHeapNumber(answer.reg(), exponent.reg(), &failure);
4175 __ movsd(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
4176 // Remove the two original values from the frame - we only need those
4177 // in the case where we branch to runtime.
4178 frame()->Drop(2);
4179 exponent.Unuse();
4180 base.Unuse();
4181 done.Jump(&answer);
4182 // Use the copy of the original frame as our current frame.
4183 RegisterFile empty_regs;
4184 SetFrame(clone, &empty_regs);
4185 // If we experience an allocation failure we branch to runtime.
4186 __ bind(&failure);
4187 call_runtime.Bind();
4188 answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);
4189
4190 done.Bind(&answer);
4191 frame()->Push(&answer);
4192}
4193
4194
4195// Generates the Math.sqrt method. Please note - this function assumes that
4196// the callsite has executed ToNumber on the argument.
4197void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
4198 ASSERT(args->length() == 1);
4199 Load(args->at(0));
4200
4201 // Leave original value on the frame if we need to call runtime.
4202 frame()->Dup();
4203 Result result = frame()->Pop();
4204 result.ToRegister();
4205 frame()->Spill(result.reg());
4206 Label runtime;
4207 Label non_smi;
4208 Label load_done;
4209 JumpTarget end;
4210
4211 __ JumpIfNotSmi(result.reg(), &non_smi);
4212 __ SmiToInteger32(result.reg(), result.reg());
4213 __ cvtlsi2sd(xmm0, result.reg());
4214 __ jmp(&load_done);
4215 __ bind(&non_smi);
4216 __ CompareRoot(FieldOperand(result.reg(), HeapObject::kMapOffset),
4217 Heap::kHeapNumberMapRootIndex);
4218 __ j(not_equal, &runtime);
4219 __ movsd(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
4220
4221 __ bind(&load_done);
4222 __ sqrtsd(xmm0, xmm0);
4223 // A copy of the virtual frame to allow us to go to runtime after the
4224 // JumpTarget jump.
4225 Result scratch = allocator()->Allocate();
4226 VirtualFrame* clone = new VirtualFrame(frame());
4227 __ AllocateHeapNumber(result.reg(), scratch.reg(), &runtime);
4228
4229 __ movsd(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
4230 frame()->Drop(1);
4231 scratch.Unuse();
4232 end.Jump(&result);
4233 // We only branch to runtime if we have an allocation error.
4234 // Use the copy of the original frame as our current frame.
4235 RegisterFile empty_regs;
4236 SetFrame(clone, &empty_regs);
4237 __ bind(&runtime);
4238 result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
4239
4240 end.Bind(&result);
4241 frame()->Push(&result);
4242}
4243
4244
Steve Blocka7e24c12009-10-30 11:49:00 +00004245void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
4246 ASSERT(args->length() == 1);
4247 Load(args->at(0));
4248 Result value = frame_->Pop();
4249 value.ToRegister();
4250 ASSERT(value.is_valid());
4251 Condition is_smi = masm_->CheckSmi(value.reg());
4252 value.Unuse();
4253 destination()->Split(is_smi);
4254}
4255
4256
4257void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
4258 // Conditionally generate a log call.
4259 // Args:
4260 // 0 (literal string): The type of logging (corresponds to the flags).
4261 // This is used to determine whether or not to generate the log call.
4262 // 1 (string): Format string. Access the string at argument index 2
4263 // with '%2s' (see Logger::LogRuntime for all the formats).
4264 // 2 (array): Arguments to the format string.
4265 ASSERT_EQ(args->length(), 3);
4266#ifdef ENABLE_LOGGING_AND_PROFILING
4267 if (ShouldGenerateLog(args->at(0))) {
4268 Load(args->at(1));
4269 Load(args->at(2));
4270 frame_->CallRuntime(Runtime::kLog, 2);
4271 }
4272#endif
4273 // Finally, we're expected to leave a value on the top of the stack.
4274 frame_->Push(Factory::undefined_value());
4275}
4276
4277
4278void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
4279 ASSERT(args->length() == 2);
4280
4281 // Load the two objects into registers and perform the comparison.
4282 Load(args->at(0));
4283 Load(args->at(1));
4284 Result right = frame_->Pop();
4285 Result left = frame_->Pop();
4286 right.ToRegister();
4287 left.ToRegister();
4288 __ cmpq(right.reg(), left.reg());
4289 right.Unuse();
4290 left.Unuse();
4291 destination()->Split(equal);
4292}
4293
4294
4295void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
4296 ASSERT(args->length() == 0);
4297 // The RBP value is aligned, so it can be tagged as a smi (its low bits are
Steve Block3ce2e202009-11-05 08:53:23 +00004298 // already zero), but it is not a valid smi value and must not be treated as one.
Steve Blocka7e24c12009-10-30 11:49:00 +00004299 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
4300 Result rbp_as_smi = allocator_->Allocate();
4301 ASSERT(rbp_as_smi.is_valid());
4302 __ movq(rbp_as_smi.reg(), rbp);
4303 frame_->Push(&rbp_as_smi);
4304}
4305
4306
Steve Block6ded16b2010-05-10 14:33:55 +01004307void CodeGenerator::GenerateRandomHeapNumber(
4308 ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004309 ASSERT(args->length() == 0);
4310 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00004311
Steve Block6ded16b2010-05-10 14:33:55 +01004312 Label slow_allocate_heapnumber;
4313 Label heapnumber_allocated;
4314 __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
4315 __ jmp(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00004316
Steve Block6ded16b2010-05-10 14:33:55 +01004317 __ bind(&slow_allocate_heapnumber);
4318 // To allocate a heap number, and ensure that it is not a smi, we
4319 // call the runtime function Runtime::kNumberUnaryMinus on 0, returning the double
4320 // -0.0. A new, distinct heap number is returned each time.
4321 __ Push(Smi::FromInt(0));
4322 __ CallRuntime(Runtime::kNumberUnaryMinus, 1);
4323 __ movq(rbx, rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00004324
Steve Block6ded16b2010-05-10 14:33:55 +01004325 __ bind(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00004326
Steve Block6ded16b2010-05-10 14:33:55 +01004327 // Return a random uint32 number in rax.
4328 // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
4329 __ PrepareCallCFunction(0);
4330 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
4331
4332 // Convert 32 random bits in eax to 0.(32 random bits) in a double
4333 // by computing:
4334 // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
4335 __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
4336 __ movd(xmm1, rcx);
4337 __ movd(xmm0, rax);
4338 __ cvtss2sd(xmm1, xmm1);
4339 __ xorpd(xmm0, xmm1);
4340 __ subsd(xmm0, xmm1);
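  // xmm0 now holds (32 random bits) / 2^32, a value in [0, 1):
  // rax == 0 yields 0.0, rax == 0xFFFFFFFF yields (2^32 - 1) / 2^32.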
4341 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
4342
4343 __ movq(rax, rbx);
Steve Blocka7e24c12009-10-30 11:49:00 +00004344 Result result = allocator_->Allocate(rax);
4345 frame_->Push(&result);
4346}
4347
4348
Leon Clarkee46be812010-01-19 14:06:41 +00004349void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
4350 ASSERT_EQ(args->length(), 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00004351
Leon Clarkee46be812010-01-19 14:06:41 +00004352 // Load the arguments on the stack and call the runtime system.
Steve Blocka7e24c12009-10-30 11:49:00 +00004353 Load(args->at(0));
Leon Clarkee46be812010-01-19 14:06:41 +00004354 Load(args->at(1));
4355 Load(args->at(2));
4356 Load(args->at(3));
Leon Clarke4515c472010-02-03 11:58:03 +00004357 RegExpExecStub stub;
4358 Result result = frame_->CallStub(&stub, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00004359 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004360}
4361
4362
Steve Block6ded16b2010-05-10 14:33:55 +01004363void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
4364 // No stub. This code only occurs a few times in regexp.js.
4365 const int kMaxInlineLength = 100;
4366 ASSERT_EQ(3, args->length());
4367 Load(args->at(0)); // Size of array, smi.
4368 Load(args->at(1)); // "index" property value.
4369 Load(args->at(2)); // "input" property value.
4370 {
4371 VirtualFrame::SpilledScope spilled_scope;
4372
4373 Label slowcase;
4374 Label done;
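    // The array size (the first argument, pushed deepest) must be a smi in
    // the range [0, kMaxInlineLength]; the unsigned 'above' check also sends
    // negative values to the slow case.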
4375 __ movq(r8, Operand(rsp, kPointerSize * 2));
4376 __ JumpIfNotSmi(r8, &slowcase);
4377 __ SmiToInteger32(rbx, r8);
4378 __ cmpl(rbx, Immediate(kMaxInlineLength));
4379 __ j(above, &slowcase);
4380 // Smi-tagging is equivalent to multiplying by 2.
4381 STATIC_ASSERT(kSmiTag == 0);
4382 STATIC_ASSERT(kSmiTagSize == 1);
4383 // Allocate RegExpResult followed by FixedArray with size in ebx.
4384 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
4385 // Elements: [Map][Length][..elements..]
4386 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
4387 times_pointer_size,
4388 rbx, // In: Number of elements.
4389 rax, // Out: Start of allocation (tagged).
4390 rcx, // Out: End of allocation.
4391 rdx, // Scratch register
4392 &slowcase,
4393 TAG_OBJECT);
4394 // rax: Start of allocated area, object-tagged.
4395 // rbx: Number of array elements as int32.
4396 // r8: Number of array elements as smi.
4397
4398 // Set JSArray map to global.regexp_result_map().
4399 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
4400 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
4401 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
4402 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
4403
4404 // Set empty properties FixedArray.
4405 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset),
4406 Factory::empty_fixed_array());
4407
4408 // Set elements to point to FixedArray allocated right after the JSArray.
4409 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
4410 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
4411
4412 // Set input, index and length fields from arguments.
4413 __ pop(FieldOperand(rax, JSRegExpResult::kInputOffset));
4414 __ pop(FieldOperand(rax, JSRegExpResult::kIndexOffset));
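    // Drop the size argument from the stack; its value is already in r8.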
4415 __ lea(rsp, Operand(rsp, kPointerSize));
4416 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
4417
4418 // Fill out the elements FixedArray.
4419 // rax: JSArray.
4420 // rcx: FixedArray.
4421 // rbx: Number of elements in array as int32.
4422
4423 // Set map.
4424 __ Move(FieldOperand(rcx, HeapObject::kMapOffset),
4425 Factory::fixed_array_map());
4426 // Set length.
Kristian Monsen25f61362010-05-21 11:50:48 +01004427 __ movl(FieldOperand(rcx, FixedArray::kLengthOffset), rbx);
Steve Block6ded16b2010-05-10 14:33:55 +01004428 // Fill contents of fixed-array with the-hole.
4429 __ Move(rdx, Factory::the_hole_value());
4430 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
4431 // Fill fixed array elements with hole.
4432 // rax: JSArray.
4433 // rbx: Number of elements in array that remains to be filled, as int32.
4434 // rcx: Start of elements in FixedArray.
4435 // rdx: the hole.
4436 Label loop;
4437 __ testl(rbx, rbx);
4438 __ bind(&loop);
4439 __ j(less_equal, &done); // Jump if rbx is negative or zero.
4440 __ subl(rbx, Immediate(1));
4441 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
4442 __ jmp(&loop);
4443
4444 __ bind(&slowcase);
4445 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
4446
4447 __ bind(&done);
4448 }
4449 frame_->Forget(3);
4450 frame_->Push(rax);
4451}
4452
4453
4454class DeferredSearchCache: public DeferredCode {
4455 public:
Kristian Monsen25f61362010-05-21 11:50:48 +01004456 DeferredSearchCache(Register dst,
4457 Register cache,
4458 Register key,
4459 Register scratch)
4460 : dst_(dst), cache_(cache), key_(key), scratch_(scratch) {
Steve Block6ded16b2010-05-10 14:33:55 +01004461 set_comment("[ DeferredSearchCache");
4462 }
4463
4464 virtual void Generate();
4465
4466 private:
Kristian Monsen25f61362010-05-21 11:50:48 +01004467 Register dst_; // On invocation holds the finger index (as a Smi); on exit
4468 // holds the value that was looked up.
4469 Register cache_; // instance of JSFunctionResultCache.
4470 Register key_; // key being looked up.
4471 Register scratch_;
Steve Block6ded16b2010-05-10 14:33:55 +01004472};
4473
4474
Kristian Monsen25f61362010-05-21 11:50:48 +01004475// Returns the operand of the element at |index| + |additional_offset|
4476// in the FixedArray pointed to by |array|. |index| is an int32 value.
4477static Operand ArrayElement(Register array,
4478 Register index,
4479 int additional_offset = 0) {
4480 int offset = FixedArray::kHeaderSize + additional_offset * kPointerSize;
4481 return FieldOperand(array, index, times_pointer_size, offset);
4482}
4483
4484
Steve Block6ded16b2010-05-10 14:33:55 +01004485void DeferredSearchCache::Generate() {
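  // The cache is a FixedArray holding (key, value) entry pairs after its
  // header fields; the finger is a smi index pointing at the key of the most
  // recently used entry. Search from the finger down towards the first entry,
  // then from the end of the cache down to the finger, and call the cache's
  // factory function on a miss.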
Kristian Monsen25f61362010-05-21 11:50:48 +01004486 Label first_loop, search_further, second_loop, cache_miss;
4487
4488 Immediate kEntriesIndexImm = Immediate(JSFunctionResultCache::kEntriesIndex);
4489 Immediate kEntrySizeImm = Immediate(JSFunctionResultCache::kEntrySize);
4490
4491 __ SmiToInteger32(dst_, dst_);
4492 // Check the cache from finger to start of the cache.
4493 __ bind(&first_loop);
4494 __ subq(dst_, kEntrySizeImm);
4495 __ cmpq(dst_, kEntriesIndexImm);
4496 __ j(less, &search_further);
4497
4498 __ cmpq(ArrayElement(cache_, dst_), key_);
4499 __ j(not_equal, &first_loop);
4500
4501 __ Integer32ToSmi(scratch_, dst_);
4502 __ movq(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), scratch_);
4503 __ movq(dst_, ArrayElement(cache_, dst_, 1));
4504 __ jmp(exit_label());
4505
4506 __ bind(&search_further);
4507
4508 // Check the cache from end of cache up to finger.
4509 __ movq(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));
4510 __ movq(scratch_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
4511 __ SmiToInteger32(dst_, dst_);
4512 __ SmiToInteger32(scratch_, scratch_);
4513
4514 __ bind(&second_loop);
4515 __ subq(dst_, kEntrySizeImm);
4516 __ cmpq(dst_, scratch_);
4517 __ j(less_equal, &cache_miss);
4518
4519 __ cmpq(ArrayElement(cache_, dst_), key_);
4520 __ j(not_equal, &second_loop);
4521
4522 __ Integer32ToSmi(scratch_, dst_);
4523 __ movq(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), scratch_);
4524 __ movq(dst_, ArrayElement(cache_, dst_, 1));
4525 __ jmp(exit_label());
4526
4527 __ bind(&cache_miss);
4528 __ push(cache_); // store a reference to cache
4529 __ push(key_); // store a key
4530 Handle<Object> receiver(Top::global_context()->global());
4531 __ Push(receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01004532 __ push(key_);
Kristian Monsen25f61362010-05-21 11:50:48 +01004533 // On x64 the function to invoke must be in rdi.
4534 __ movq(rdi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
4535 ParameterCount expected(1);
4536 __ InvokeFunction(rdi, expected, CALL_FUNCTION);
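  // The call consumed the receiver and argument; rax now holds the newly
  // computed value, and the cache and key saved above are still on the stack.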
4537
4538 // Find a place to put new cached value into.
4539 Label add_new_entry, update_cache;
4540 __ movq(rcx, Operand(rsp, kPointerSize)); // restore the cache
4541 // Possible optimization: the cache size is constant for a given cache,
4542 // so technically we could use a constant here. However, since we only
4543 // get here on a cache miss, this optimization would hardly matter.
4544
4545 // Check if we could add new entry to cache.
4546 __ movl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
4547 __ movq(r9, FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset));
4548 __ SmiToInteger32(r9, r9);
4549 __ cmpq(rbx, r9);
4550 __ j(greater, &add_new_entry);
4551
4552 // Check if we could evict entry after finger.
4553 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset));
4554 __ SmiToInteger32(rdx, rdx);
4555 __ addq(rdx, kEntrySizeImm);
4556 Label forward;
4557 __ cmpq(rbx, rdx);
4558 __ j(greater, &forward);
4559 // Need to wrap over the cache.
4560 __ movq(rdx, kEntriesIndexImm);
4561 __ bind(&forward);
4562 __ Integer32ToSmi(r9, rdx);
4563 __ jmp(&update_cache);
4564
4565 __ bind(&add_new_entry);
4566 // r9 holds cache size as int.
4567 __ movq(rdx, r9);
4568 __ Integer32ToSmi(r9, r9);
4569 __ SmiAddConstant(rbx, r9, Smi::FromInt(JSFunctionResultCache::kEntrySize));
4570 __ movq(FieldOperand(rcx, JSFunctionResultCache::kCacheSizeOffset), rbx);
4571
4572 // Update the cache itself.
4573 // rdx holds the index as int.
4574 // r9 holds the index as smi.
4575 __ bind(&update_cache);
4576 __ pop(rbx); // restore the key
4577 __ movq(FieldOperand(rcx, JSFunctionResultCache::kFingerOffset), r9);
4578 // Store key.
4579 __ movq(ArrayElement(rcx, rdx), rbx);
4580 __ RecordWrite(rcx, 0, rbx, r9);
4581
4582 // Store value.
4583 __ pop(rcx); // restore the cache.
4584 __ movq(rdx, FieldOperand(rcx, JSFunctionResultCache::kFingerOffset));
4585 __ SmiAddConstant(rdx, rdx, Smi::FromInt(1));
4586 __ movq(r9, rdx);
4587 __ SmiToInteger32(rdx, rdx);
4588 __ movq(rbx, rax);
4589 __ movq(ArrayElement(rcx, rdx), rbx);
4590 __ RecordWrite(rcx, 0, rbx, r9);
4591
Steve Block6ded16b2010-05-10 14:33:55 +01004592 if (!dst_.is(rax)) {
4593 __ movq(dst_, rax);
4594 }
4595}
4596
4597
4598void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
4599 ASSERT_EQ(2, args->length());
4600
4601 ASSERT_NE(NULL, args->at(0)->AsLiteral());
4602 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
4603
4604 Handle<FixedArray> jsfunction_result_caches(
4605 Top::global_context()->jsfunction_result_caches());
4606 if (jsfunction_result_caches->length() <= cache_id) {
4607 __ Abort("Attempt to use undefined cache.");
4608 frame_->Push(Factory::undefined_value());
4609 return;
4610 }
4611
4612 Load(args->at(1));
4613 Result key = frame_->Pop();
4614 key.ToRegister();
4615
4616 Result cache = allocator()->Allocate();
4617 ASSERT(cache.is_valid());
4618 __ movq(cache.reg(), ContextOperand(rsi, Context::GLOBAL_INDEX));
4619 __ movq(cache.reg(),
4620 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
4621 __ movq(cache.reg(),
4622 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
4623 __ movq(cache.reg(),
4624 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));
4625
4626 Result tmp = allocator()->Allocate();
4627 ASSERT(tmp.is_valid());
4628
Kristian Monsen25f61362010-05-21 11:50:48 +01004629 Result scratch = allocator()->Allocate();
4630 ASSERT(scratch.is_valid());
4631
Steve Block6ded16b2010-05-10 14:33:55 +01004632 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
4633 cache.reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01004634 key.reg(),
4635 scratch.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01004636
4637 const int kFingerOffset =
4638 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
4639 // Load the finger offset (a smi) from the cache into tmp.reg().
Steve Block6ded16b2010-05-10 14:33:55 +01004640 __ movq(tmp.reg(), FieldOperand(cache.reg(), kFingerOffset));
4641 SmiIndex index =
4642 masm()->SmiToIndex(kScratchRegister, tmp.reg(), kPointerSizeLog2);
4643 __ cmpq(key.reg(), FieldOperand(cache.reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01004644 index.reg, index.scale,
Steve Block6ded16b2010-05-10 14:33:55 +01004645 FixedArray::kHeaderSize));
Kristian Monsen25f61362010-05-21 11:50:48 +01004646 // Do NOT alter index.reg or tmp.reg() yet; they are still needed by the deferred code and the movq below.
Steve Block6ded16b2010-05-10 14:33:55 +01004647 deferred->Branch(not_equal);
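  // Cache hit: the value lives in the slot immediately after the matching key.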
Steve Block6ded16b2010-05-10 14:33:55 +01004648 __ movq(tmp.reg(), FieldOperand(cache.reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01004649 index.reg, index.scale,
4650 FixedArray::kHeaderSize + kPointerSize));
Steve Block6ded16b2010-05-10 14:33:55 +01004651
4652 deferred->BindExit();
4653 frame_->Push(&tmp);
4654}
4655
4656
Andrei Popescu402d9372010-02-26 13:31:12 +00004657void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
4658 ASSERT_EQ(args->length(), 1);
4659
4660 // Load the argument on the stack and jump to the runtime.
4661 Load(args->at(0));
4662
Steve Block6ded16b2010-05-10 14:33:55 +01004663 NumberToStringStub stub;
4664 Result result = frame_->CallStub(&stub, 1);
4665 frame_->Push(&result);
4666}
4667
4668
4669class DeferredSwapElements: public DeferredCode {
4670 public:
4671 DeferredSwapElements(Register object, Register index1, Register index2)
4672 : object_(object), index1_(index1), index2_(index2) {
4673 set_comment("[ DeferredSwapElements");
4674 }
4675
4676 virtual void Generate();
4677
4678 private:
4679 Register object_, index1_, index2_;
4680};
4681
4682
4683void DeferredSwapElements::Generate() {
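  // Slow case: push the object and both indices and let the runtime
  // perform the swap.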
4684 __ push(object_);
4685 __ push(index1_);
4686 __ push(index2_);
4687 __ CallRuntime(Runtime::kSwapElements, 3);
4688}
4689
4690
4691void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
4692 Comment cmnt(masm_, "[ GenerateSwapElements");
4693
4694 ASSERT_EQ(3, args->length());
4695
4696 Load(args->at(0));
4697 Load(args->at(1));
4698 Load(args->at(2));
4699
4700 Result index2 = frame_->Pop();
4701 index2.ToRegister();
4702
4703 Result index1 = frame_->Pop();
4704 index1.ToRegister();
4705
4706 Result object = frame_->Pop();
4707 object.ToRegister();
4708
4709 Result tmp1 = allocator()->Allocate();
4710 tmp1.ToRegister();
4711 Result tmp2 = allocator()->Allocate();
4712 tmp2.ToRegister();
4713
4714 frame_->Spill(object.reg());
4715 frame_->Spill(index1.reg());
4716 frame_->Spill(index2.reg());
4717
4718 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
4719 index1.reg(),
4720 index2.reg());
4721
4722 // Fetch the map and check if array is in fast case.
4723 // Check that object doesn't require security checks and
4724 // has no indexed interceptor.
4725 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
4726 deferred->Branch(below);
4727 __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
4728 Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
4729 deferred->Branch(not_zero);
4730
4731 // Check the object's elements are in fast case.
4732 __ movq(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
4733 __ CompareRoot(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
4734 Heap::kFixedArrayMapRootIndex);
4735 deferred->Branch(not_equal);
4736
4737 // Check that both indices are smis.
4738 Condition both_smi = __ CheckBothSmi(index1.reg(), index2.reg());
4739 deferred->Branch(NegateCondition(both_smi));
4740
4741 // Bring addresses into index1 and index2.
4742 __ SmiToInteger32(index1.reg(), index1.reg());
4743 __ lea(index1.reg(), FieldOperand(tmp1.reg(),
4744 index1.reg(),
4745 times_pointer_size,
4746 FixedArray::kHeaderSize));
4747 __ SmiToInteger32(index2.reg(), index2.reg());
4748 __ lea(index2.reg(), FieldOperand(tmp1.reg(),
4749 index2.reg(),
4750 times_pointer_size,
4751 FixedArray::kHeaderSize));
4752
4753 // Swap elements.
4754 __ movq(object.reg(), Operand(index1.reg(), 0));
4755 __ movq(tmp2.reg(), Operand(index2.reg(), 0));
4756 __ movq(Operand(index2.reg(), 0), object.reg());
4757 __ movq(Operand(index1.reg(), 0), tmp2.reg());
4758
4759 Label done;
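  // No write barrier is needed if the elements array is still in new space.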
4760 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
4761 // Possible optimization: do a check that both values are Smis
4762 // (or them and test against Smi mask.)
4763
4764 __ movq(tmp2.reg(), tmp1.reg());
4765 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg());
4766 __ CallStub(&recordWrite1);
4767
4768 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg());
4769 __ CallStub(&recordWrite2);
4770
4771 __ bind(&done);
4772
4773 deferred->BindExit();
4774 frame_->Push(Factory::undefined_value());
4775}
4776
4777
4778void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
4779 Comment cmnt(masm_, "[ GenerateCallFunction");
4780
4781 ASSERT(args->length() >= 2);
4782
4783 int n_args = args->length() - 2; // for receiver and function.
4784 Load(args->at(0)); // receiver
4785 for (int i = 0; i < n_args; i++) {
4786 Load(args->at(i + 1));
4787 }
4788 Load(args->at(n_args + 1)); // function
4789 Result result = frame_->CallJSFunction(n_args);
4790 frame_->Push(&result);
Andrei Popescu402d9372010-02-26 13:31:12 +00004791}
4792
4793
4794void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
4795 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00004796 Load(args->at(0));
Steve Block6ded16b2010-05-10 14:33:55 +01004797 TranscendentalCacheStub stub(TranscendentalCache::SIN);
4798 Result result = frame_->CallStub(&stub, 1);
4799 frame_->Push(&result);
Andrei Popescu402d9372010-02-26 13:31:12 +00004800}
4801
4802
4803void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
4804 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00004805 Load(args->at(0));
Steve Block6ded16b2010-05-10 14:33:55 +01004806 TranscendentalCacheStub stub(TranscendentalCache::COS);
4807 Result result = frame_->CallStub(&stub, 1);
4808 frame_->Push(&result);
Andrei Popescu402d9372010-02-26 13:31:12 +00004809}
4810
4811
Steve Blockd0582a62009-12-15 09:54:21 +00004812void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
4813 ASSERT_EQ(2, args->length());
4814
4815 Load(args->at(0));
4816 Load(args->at(1));
4817
Leon Clarkee46be812010-01-19 14:06:41 +00004818 StringAddStub stub(NO_STRING_ADD_FLAGS);
4819 Result answer = frame_->CallStub(&stub, 2);
4820 frame_->Push(&answer);
4821}
4822
4823
4824void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
4825 ASSERT_EQ(3, args->length());
4826
4827 Load(args->at(0));
4828 Load(args->at(1));
4829 Load(args->at(2));
4830
Leon Clarked91b9f72010-01-27 17:25:45 +00004831 SubStringStub stub;
4832 Result answer = frame_->CallStub(&stub, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00004833 frame_->Push(&answer);
4834}
4835
4836
4837void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
4838 ASSERT_EQ(2, args->length());
4839
4840 Load(args->at(0));
4841 Load(args->at(1));
4842
4843 StringCompareStub stub;
4844 Result answer = frame_->CallStub(&stub, 2);
Steve Blockd0582a62009-12-15 09:54:21 +00004845 frame_->Push(&answer);
4846}
4847
4848
Steve Blocka7e24c12009-10-30 11:49:00 +00004849void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
4850 ASSERT(args->length() == 1);
4851 JumpTarget leave, null, function, non_function_constructor;
4852 Load(args->at(0)); // Load the object.
4853 Result obj = frame_->Pop();
4854 obj.ToRegister();
4855 frame_->Spill(obj.reg());
4856
4857 // If the object is a smi, we return null.
4858 Condition is_smi = masm_->CheckSmi(obj.reg());
4859 null.Branch(is_smi);
4860
4861 // Check that the object is a JS object but take special care of JS
4862 // functions to make sure they have 'Function' as their class.
4863
4864 __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
4865 null.Branch(below);
4866
4867 // As long as JS_FUNCTION_TYPE is the last instance type and it is
4868 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
4869 // LAST_JS_OBJECT_TYPE.
4870 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4871 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
4872 __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
4873 function.Branch(equal);
4874
4875 // Check if the constructor in the map is a function.
4876 __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
4877 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister);
4878 non_function_constructor.Branch(not_equal);
4879
4880 // The obj register now contains the constructor function. Grab the
4881 // instance class name from there.
4882 __ movq(obj.reg(),
4883 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
4884 __ movq(obj.reg(),
4885 FieldOperand(obj.reg(),
4886 SharedFunctionInfo::kInstanceClassNameOffset));
4887 frame_->Push(&obj);
4888 leave.Jump();
4889
4890 // Functions have class 'Function'.
4891 function.Bind();
4892 frame_->Push(Factory::function_class_symbol());
4893 leave.Jump();
4894
4895 // Objects with a non-function constructor have class 'Object'.
4896 non_function_constructor.Bind();
4897 frame_->Push(Factory::Object_symbol());
4898 leave.Jump();
4899
4900 // Non-JS objects have class null.
4901 null.Bind();
4902 frame_->Push(Factory::null_value());
4903
4904 // All done.
4905 leave.Bind();
4906}
4907
4908
4909void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
4910 ASSERT(args->length() == 2);
4911 JumpTarget leave;
4912 Load(args->at(0)); // Load the object.
4913 Load(args->at(1)); // Load the value.
4914 Result value = frame_->Pop();
4915 Result object = frame_->Pop();
4916 value.ToRegister();
4917 object.ToRegister();
4918
4919 // if (object->IsSmi()) return value.
4920 Condition is_smi = masm_->CheckSmi(object.reg());
4921 leave.Branch(is_smi, &value);
4922
4923 // It is a heap object - get its map.
4924 Result scratch = allocator_->Allocate();
4925 ASSERT(scratch.is_valid());
4926 // if (!object->IsJSValue()) return value.
4927 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
4928 leave.Branch(not_equal, &value);
4929
4930 // Store the value.
4931 __ movq(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
4932 // Update the write barrier. Save the value as it will be
4933 // overwritten by the write barrier code and is needed afterward.
4934 Result duplicate_value = allocator_->Allocate();
4935 ASSERT(duplicate_value.is_valid());
4936 __ movq(duplicate_value.reg(), value.reg());
4937 // The object register is also overwritten by the write barrier and
4938 // possibly aliased in the frame.
4939 frame_->Spill(object.reg());
4940 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
4941 scratch.reg());
4942 object.Unuse();
4943 scratch.Unuse();
4944 duplicate_value.Unuse();
4945
4946 // Leave.
4947 leave.Bind(&value);
4948 frame_->Push(&value);
4949}
4950
4951
4952void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
4953 ASSERT(args->length() == 1);
4954 JumpTarget leave;
4955 Load(args->at(0)); // Load the object.
4956 frame_->Dup();
4957 Result object = frame_->Pop();
4958 object.ToRegister();
4959 ASSERT(object.is_valid());
4960 // if (object->IsSmi()) return object.
4961 Condition is_smi = masm_->CheckSmi(object.reg());
4962 leave.Branch(is_smi);
4963 // It is a heap object - get map.
4964 Result temp = allocator()->Allocate();
4965 ASSERT(temp.is_valid());
4966 // if (!object->IsJSValue()) return object.
4967 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
4968 leave.Branch(not_equal);
4969 __ movq(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
4970 object.Unuse();
4971 frame_->SetElementAt(0, &temp);
4972 leave.Bind();
4973}
4974
4975
4976// -----------------------------------------------------------------------------
4977// CodeGenerator implementation of Expressions
4978
Steve Blockd0582a62009-12-15 09:54:21 +00004979void CodeGenerator::LoadAndSpill(Expression* expression) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004980 // TODO(x64): No architecture specific code. Move to shared location.
4981 ASSERT(in_spilled_code());
4982 set_in_spilled_code(false);
Steve Blockd0582a62009-12-15 09:54:21 +00004983 Load(expression);
Steve Blocka7e24c12009-10-30 11:49:00 +00004984 frame_->SpillAll();
4985 set_in_spilled_code(true);
4986}
4987
4988
Steve Blockd0582a62009-12-15 09:54:21 +00004989void CodeGenerator::Load(Expression* expr) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004990#ifdef DEBUG
4991 int original_height = frame_->height();
4992#endif
4993 ASSERT(!in_spilled_code());
4994 JumpTarget true_target;
4995 JumpTarget false_target;
4996 ControlDestination dest(&true_target, &false_target, true);
Steve Blockd0582a62009-12-15 09:54:21 +00004997 LoadCondition(expr, &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00004998
4999 if (dest.false_was_fall_through()) {
5000 // The false target was just bound.
5001 JumpTarget loaded;
5002 frame_->Push(Factory::false_value());
5003 // There may be dangling jumps to the true target.
5004 if (true_target.is_linked()) {
5005 loaded.Jump();
5006 true_target.Bind();
5007 frame_->Push(Factory::true_value());
5008 loaded.Bind();
5009 }
5010
5011 } else if (dest.is_used()) {
5012 // There is true, and possibly false, control flow (with true as
5013 // the fall through).
5014 JumpTarget loaded;
5015 frame_->Push(Factory::true_value());
5016 if (false_target.is_linked()) {
5017 loaded.Jump();
5018 false_target.Bind();
5019 frame_->Push(Factory::false_value());
5020 loaded.Bind();
5021 }
5022
5023 } else {
5024 // We have a valid value on top of the frame, but we still may
5025 // have dangling jumps to the true and false targets from nested
5026 // subexpressions (eg, the left subexpressions of the
5027 // short-circuited boolean operators).
5028 ASSERT(has_valid_frame());
5029 if (true_target.is_linked() || false_target.is_linked()) {
5030 JumpTarget loaded;
5031 loaded.Jump(); // Don't lose the current TOS.
5032 if (true_target.is_linked()) {
5033 true_target.Bind();
5034 frame_->Push(Factory::true_value());
5035 if (false_target.is_linked()) {
5036 loaded.Jump();
5037 }
5038 }
5039 if (false_target.is_linked()) {
5040 false_target.Bind();
5041 frame_->Push(Factory::false_value());
5042 }
5043 loaded.Bind();
5044 }
5045 }
5046
5047 ASSERT(has_valid_frame());
5048 ASSERT(frame_->height() == original_height + 1);
5049}
5050
5051
5052// Emit code to load the value of an expression to the top of the
5053// frame. If the expression is boolean-valued it may be compiled (or
5054// partially compiled) into control flow to the control destination.
5055// If force_control is true, control flow is forced.
5056void CodeGenerator::LoadCondition(Expression* x,
Steve Blocka7e24c12009-10-30 11:49:00 +00005057 ControlDestination* dest,
5058 bool force_control) {
5059 ASSERT(!in_spilled_code());
5060 int original_height = frame_->height();
5061
Steve Blockd0582a62009-12-15 09:54:21 +00005062 { CodeGenState new_state(this, dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00005063 Visit(x);
5064
5065 // If we hit a stack overflow, we may not have actually visited
5066 // the expression. In that case, we ensure that we have a
5067 // valid-looking frame state because we will continue to generate
5068 // code as we unwind the C++ stack.
5069 //
5070 // It's possible to have both a stack overflow and a valid frame
5071 // state (eg, a subexpression overflowed, visiting it returned
5072 // with a dummied frame state, and visiting this expression
5073 // returned with a normal-looking state).
5074 if (HasStackOverflow() &&
5075 !dest->is_used() &&
5076 frame_->height() == original_height) {
5077 dest->Goto(true);
5078 }
5079 }
5080
5081 if (force_control && !dest->is_used()) {
5082 // Convert the TOS value into flow to the control destination.
5083 // TODO(X64): Make control flow to control destinations work.
5084 ToBoolean(dest);
5085 }
5086
5087 ASSERT(!(force_control && !dest->is_used()));
5088 ASSERT(dest->is_used() || frame_->height() == original_height + 1);
5089}
5090
5091
Steve Blocka7e24c12009-10-30 11:49:00 +00005092// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
5093// convert it to a boolean in the condition code register or jump to
5094// 'false_target'/'true_target' as appropriate.
5095void CodeGenerator::ToBoolean(ControlDestination* dest) {
5096 Comment cmnt(masm_, "[ ToBoolean");
5097
5098 // The value to convert should be popped from the frame.
5099 Result value = frame_->Pop();
5100 value.ToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00005101
Andrei Popescu402d9372010-02-26 13:31:12 +00005102 if (value.is_number()) {
5103 Comment cmnt(masm_, "ONLY_NUMBER");
Steve Block6ded16b2010-05-10 14:33:55 +01005104 // Fast case if TypeInfo indicates only numbers.
Andrei Popescu402d9372010-02-26 13:31:12 +00005105 if (FLAG_debug_code) {
5106 __ AbortIfNotNumber(value.reg(), "ToBoolean operand is not a number.");
5107 }
5108 // Smi => false iff zero.
5109 __ SmiCompare(value.reg(), Smi::FromInt(0));
5110 dest->false_target()->Branch(equal);
5111 Condition is_smi = masm_->CheckSmi(value.reg());
5112 dest->true_target()->Branch(is_smi);
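    // Heap number: compare against zero; +0, -0 and NaN are the only
    // numbers that convert to false.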
5113 __ fldz();
5114 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
5115 __ FCmp();
5116 value.Unuse();
5117 dest->Split(not_zero);
5118 } else {
5119 // Fast case checks.
5120 // 'false' => false.
5121 __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
5122 dest->false_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00005123
Andrei Popescu402d9372010-02-26 13:31:12 +00005124 // 'true' => true.
5125 __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
5126 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00005127
Andrei Popescu402d9372010-02-26 13:31:12 +00005128 // 'undefined' => false.
5129 __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
5130 dest->false_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00005131
Andrei Popescu402d9372010-02-26 13:31:12 +00005132 // Smi => false iff zero.
5133 __ SmiCompare(value.reg(), Smi::FromInt(0));
5134 dest->false_target()->Branch(equal);
5135 Condition is_smi = masm_->CheckSmi(value.reg());
5136 dest->true_target()->Branch(is_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00005137
Andrei Popescu402d9372010-02-26 13:31:12 +00005138 // Call the stub for all other cases.
5139 frame_->Push(&value); // Undo the Pop() from above.
5140 ToBooleanStub stub;
5141 Result temp = frame_->CallStub(&stub, 1);
5142 // Convert the result to a condition code.
5143 __ testq(temp.reg(), temp.reg());
5144 temp.Unuse();
5145 dest->Split(not_equal);
5146 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005147}
5148
5149
5150void CodeGenerator::LoadUnsafeSmi(Register target, Handle<Object> value) {
5151 UNIMPLEMENTED();
5152 // TODO(X64): Implement security policy for loads of smis.
5153}
5154
5155
5156bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
5157 return false;
5158}
5159
5160//------------------------------------------------------------------------------
5161// CodeGenerator implementation of variables, lookups, and stores.
5162
Leon Clarked91b9f72010-01-27 17:25:45 +00005163Reference::Reference(CodeGenerator* cgen,
5164 Expression* expression,
5165 bool persist_after_get)
5166 : cgen_(cgen),
5167 expression_(expression),
5168 type_(ILLEGAL),
5169 persist_after_get_(persist_after_get) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005170 cgen->LoadReference(this);
5171}
5172
5173
5174Reference::~Reference() {
Leon Clarked91b9f72010-01-27 17:25:45 +00005175 ASSERT(is_unloaded() || is_illegal());
Steve Blocka7e24c12009-10-30 11:49:00 +00005176}
5177
5178
5179void CodeGenerator::LoadReference(Reference* ref) {
5180 // References are loaded from both spilled and unspilled code. Set the
5181 // state to unspilled to allow that (and explicitly spill after
5182 // construction at the construction sites).
5183 bool was_in_spilled_code = in_spilled_code_;
5184 in_spilled_code_ = false;
5185
5186 Comment cmnt(masm_, "[ LoadReference");
5187 Expression* e = ref->expression();
5188 Property* property = e->AsProperty();
5189 Variable* var = e->AsVariableProxy()->AsVariable();
5190
5191 if (property != NULL) {
5192 // The expression is either a property or a variable proxy that rewrites
5193 // to a property.
5194 Load(property->obj());
Leon Clarkee46be812010-01-19 14:06:41 +00005195 if (property->key()->IsPropertyName()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005196 ref->set_type(Reference::NAMED);
5197 } else {
5198 Load(property->key());
5199 ref->set_type(Reference::KEYED);
5200 }
5201 } else if (var != NULL) {
5202 // The expression is a variable proxy that does not rewrite to a
5203 // property. Global variables are treated as named property references.
5204 if (var->is_global()) {
Kristian Monsen25f61362010-05-21 11:50:48 +01005205 // If rax is free, the register allocator prefers it. Thus the code
5206 // generator will load the global object into rax, which is where
5207 // LoadIC wants it. Most uses of Reference call LoadIC directly
5208 // after the reference is created.
5209 frame_->Spill(rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00005210 LoadGlobal();
5211 ref->set_type(Reference::NAMED);
5212 } else {
5213 ASSERT(var->slot() != NULL);
5214 ref->set_type(Reference::SLOT);
5215 }
5216 } else {
5217 // Anything else is a runtime error.
5218 Load(e);
5219 frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
5220 }
5221
5222 in_spilled_code_ = was_in_spilled_code;
5223}
5224
5225
5226void CodeGenerator::UnloadReference(Reference* ref) {
5227 // Pop a reference from the stack while preserving TOS.
5228 Comment cmnt(masm_, "[ UnloadReference");
5229 frame_->Nip(ref->size());
Leon Clarked91b9f72010-01-27 17:25:45 +00005230 ref->set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00005231}
5232
5233
5234Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
5235 // Currently, this assertion will fail if we try to assign to
5236 // a constant variable that is constant because it is read-only
5237 // (such as the variable referring to a named function expression).
5238 // We need to implement assignments to read-only variables.
5239 // Ideally, we should do this during AST generation (by converting
5240 // such assignments into expression statements); however, in general
5241 // we may not be able to make the decision until past AST generation,
5242 // that is when the entire program is known.
5243 ASSERT(slot != NULL);
5244 int index = slot->index();
5245 switch (slot->type()) {
5246 case Slot::PARAMETER:
5247 return frame_->ParameterAt(index);
5248
5249 case Slot::LOCAL:
5250 return frame_->LocalAt(index);
5251
5252 case Slot::CONTEXT: {
5253 // Follow the context chain if necessary.
5254 ASSERT(!tmp.is(rsi)); // do not overwrite context register
5255 Register context = rsi;
5256 int chain_length = scope()->ContextChainLength(slot->var()->scope());
5257 for (int i = 0; i < chain_length; i++) {
5258 // Load the closure.
5259 // (All contexts, even 'with' contexts, have a closure,
5260 // and it is the same for all contexts inside a function.
5261 // There is no need to go to the function context first.)
5262 __ movq(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
5263 // Load the function context (which is the incoming, outer context).
5264 __ movq(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
5265 context = tmp;
5266 }
5267 // We may have a 'with' context now. Get the function context.
5268 // (In fact this mov may never be needed, since the scope analysis
5269 // may not permit a direct context access in this case and thus we are
5270 // always at a function context. However it is safe to dereference be-
5271 // cause the function context of a function context is itself. Before
5272 // deleting this mov we should try to create a counter-example first,
5273 // though...)
5274 __ movq(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
5275 return ContextOperand(tmp, index);
5276 }
5277
5278 default:
5279 UNREACHABLE();
5280 return Operand(rsp, 0);
5281 }
5282}
5283
5284
5285Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
5286 Result tmp,
5287 JumpTarget* slow) {
5288 ASSERT(slot->type() == Slot::CONTEXT);
5289 ASSERT(tmp.is_register());
5290 Register context = rsi;
5291
5292 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
5293 if (s->num_heap_slots() > 0) {
5294 if (s->calls_eval()) {
5295 // Check that extension is NULL.
5296 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
5297 Immediate(0));
5298 slow->Branch(not_equal, not_taken);
5299 }
5300 __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
5301 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5302 context = tmp.reg();
5303 }
5304 }
5305 // Check that last extension is NULL.
5306 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
5307 slow->Branch(not_equal, not_taken);
5308 __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
5309 return ContextOperand(tmp.reg(), slot->index());
5310}
5311
5312
5313void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
5314 if (slot->type() == Slot::LOOKUP) {
5315 ASSERT(slot->var()->is_dynamic());
5316
5317 JumpTarget slow;
5318 JumpTarget done;
5319 Result value;
5320
Kristian Monsen25f61362010-05-21 11:50:48 +01005321 // Generate fast case for loading from slots that correspond to
5322 // local/global variables or arguments unless they are shadowed by
5323 // eval-introduced bindings.
5324 EmitDynamicLoadFromSlotFastCase(slot,
5325 typeof_state,
5326 &value,
5327 &slow,
5328 &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00005329
5330 slow.Bind();
5331 // A runtime call is inevitable. We eagerly sync frame elements
5332 // to memory so that we can push the arguments directly into place
5333 // on top of the frame.
5334 frame_->SyncRange(0, frame_->element_count() - 1);
5335 frame_->EmitPush(rsi);
5336 __ movq(kScratchRegister, slot->var()->name(), RelocInfo::EMBEDDED_OBJECT);
5337 frame_->EmitPush(kScratchRegister);
5338 if (typeof_state == INSIDE_TYPEOF) {
5339 value =
5340 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
5341 } else {
5342 value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
5343 }
5344
5345 done.Bind(&value);
5346 frame_->Push(&value);
5347
5348 } else if (slot->var()->mode() == Variable::CONST) {
5349 // Const slots may contain 'the hole' value (the constant hasn't been
5350 // initialized yet) which needs to be converted into the 'undefined'
5351 // value.
5352 //
5353 // We currently spill the virtual frame because constants use the
5354 // potentially unsafe direct-frame access of SlotOperand.
5355 VirtualFrame::SpilledScope spilled_scope;
5356 Comment cmnt(masm_, "[ Load const");
5357 JumpTarget exit;
5358 __ movq(rcx, SlotOperand(slot, rcx));
5359 __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
5360 exit.Branch(not_equal);
5361 __ LoadRoot(rcx, Heap::kUndefinedValueRootIndex);
5362 exit.Bind();
5363 frame_->EmitPush(rcx);
5364
5365 } else if (slot->type() == Slot::PARAMETER) {
5366 frame_->PushParameterAt(slot->index());
5367
5368 } else if (slot->type() == Slot::LOCAL) {
5369 frame_->PushLocalAt(slot->index());
5370
5371 } else {
5372 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
5373 // here.
5374 //
5375 // The use of SlotOperand below is safe for an unspilled frame
5376 // because it will always be a context slot.
5377 ASSERT(slot->type() == Slot::CONTEXT);
5378 Result temp = allocator_->Allocate();
5379 ASSERT(temp.is_valid());
5380 __ movq(temp.reg(), SlotOperand(slot, temp.reg()));
5381 frame_->Push(&temp);
5382 }
5383}
5384
5385
5386void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
5387 TypeofState state) {
5388 LoadFromSlot(slot, state);
5389
5390 // Bail out quickly if we're not using lazy arguments allocation.
5391 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
5392
5393 // ... or if the slot isn't a non-parameter arguments slot.
5394 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
5395
5396 // Pop the loaded value from the stack.
5397 Result value = frame_->Pop();
5398
5399 // If the loaded value is a constant, we know if the arguments
5400 // object has been lazily loaded yet.
5401 if (value.is_constant()) {
5402 if (value.handle()->IsTheHole()) {
5403 Result arguments = StoreArgumentsObject(false);
5404 frame_->Push(&arguments);
5405 } else {
5406 frame_->Push(&value);
5407 }
5408 return;
5409 }
5410
5411 // The loaded value is in a register. If it is the sentinel that
5412 // indicates that we haven't loaded the arguments object yet, we
5413 // need to do it now.
5414 JumpTarget exit;
5415 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
5416 frame_->Push(&value);
5417 exit.Branch(not_equal);
5418 Result arguments = StoreArgumentsObject(false);
5419 frame_->SetElementAt(0, &arguments);
5420 exit.Bind();
5421}
5422
5423
5424void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
5425 if (slot->type() == Slot::LOOKUP) {
5426 ASSERT(slot->var()->is_dynamic());
5427
5428 // For now, just do a runtime call. Since the call is inevitable,
5429 // we eagerly sync the virtual frame so we can directly push the
5430 // arguments into place.
5431 frame_->SyncRange(0, frame_->element_count() - 1);
5432
5433 frame_->EmitPush(rsi);
5434 frame_->EmitPush(slot->var()->name());
5435
5436 Result value;
5437 if (init_state == CONST_INIT) {
5438 // Same as the case for a normal store, but ignores attribute
5439 // (e.g. READ_ONLY) of context slot so that we can initialize const
5440 // properties (introduced via eval("const foo = (some expr);")). Also,
5441 // uses the current function context instead of the top context.
5442 //
5443 // Note that we must declare the foo upon entry of eval(), via a
5444 // context slot declaration, but we cannot initialize it at the same
5445 // time, because the const declaration may be at the end of the eval
5446 // code (sigh...) and the const variable may have been used before
5447 // (where its value is 'undefined'). Thus, we can only do the
5448 // initialization when we actually encounter the expression and when
5449 // the expression operands are defined and valid, and thus we need the
5450 // split into 2 operations: declaration of the context slot followed
5451 // by initialization.
5452 value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
5453 } else {
5454 value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
5455 }
5456 // Storing a variable must keep the (new) value on the expression
5457 // stack. This is necessary for compiling chained assignment
5458 // expressions.
5459 frame_->Push(&value);
5460 } else {
5461 ASSERT(!slot->var()->is_dynamic());
5462
5463 JumpTarget exit;
5464 if (init_state == CONST_INIT) {
5465 ASSERT(slot->var()->mode() == Variable::CONST);
5466 // Only the first const initialization must be executed (the slot
5467 // still contains 'the hole' value). When the assignment is executed,
5468 // the code is identical to a normal store (see below).
5469 //
5470 // We spill the frame in the code below because the direct-frame
5471 // access of SlotOperand is potentially unsafe with an unspilled
5472 // frame.
5473 VirtualFrame::SpilledScope spilled_scope;
5474 Comment cmnt(masm_, "[ Init const");
5475 __ movq(rcx, SlotOperand(slot, rcx));
5476 __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
5477 exit.Branch(not_equal);
5478 }
5479
5480 // We must execute the store. Storing a variable must keep the (new)
5481 // value on the stack. This is necessary for compiling assignment
5482 // expressions.
5483 //
5484 // Note: We will reach here even with slot->var()->mode() ==
5485 // Variable::CONST because of const declarations which will initialize
5486 // consts to 'the hole' value and by doing so, end up calling this code.
5487 if (slot->type() == Slot::PARAMETER) {
5488 frame_->StoreToParameterAt(slot->index());
5489 } else if (slot->type() == Slot::LOCAL) {
5490 frame_->StoreToLocalAt(slot->index());
5491 } else {
5492 // The other slot types (LOOKUP and GLOBAL) cannot reach here.
5493 //
5494 // The use of SlotOperand below is safe for an unspilled frame
5495 // because the slot is a context slot.
5496 ASSERT(slot->type() == Slot::CONTEXT);
5497 frame_->Dup();
5498 Result value = frame_->Pop();
5499 value.ToRegister();
5500 Result start = allocator_->Allocate();
5501 ASSERT(start.is_valid());
5502 __ movq(SlotOperand(slot, start.reg()), value.reg());
5503 // RecordWrite may destroy the value registers.
5504 //
5505 // TODO(204): Avoid actually spilling when the value is not
5506 // needed (probably the common case).
5507 frame_->Spill(value.reg());
5508 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
5509 Result temp = allocator_->Allocate();
5510 ASSERT(temp.is_valid());
5511 __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
5512 // The results start, value, and temp are unused by going out of
5513 // scope.
5514 }
5515
5516 exit.Bind();
5517 }
5518}
5519
5520
5521Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
5522 Slot* slot,
5523 TypeofState typeof_state,
5524 JumpTarget* slow) {
5525 // Check that no extension objects have been created by calls to
5526 // eval from the current scope to the global scope.
5527 Register context = rsi;
5528 Result tmp = allocator_->Allocate();
5529 ASSERT(tmp.is_valid()); // All non-reserved registers were available.
5530
5531 Scope* s = scope();
5532 while (s != NULL) {
5533 if (s->num_heap_slots() > 0) {
5534 if (s->calls_eval()) {
5535 // Check that extension is NULL.
5536 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
5537 Immediate(0));
5538 slow->Branch(not_equal, not_taken);
5539 }
5540 // Load next context in chain.
5541 __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
5542 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5543 context = tmp.reg();
5544 }
5545 // If no outer scope calls eval, we do not need to check more
5546 // context extensions. If we have reached an eval scope, we check
5547 // all extensions from this point.
5548 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
5549 s = s->outer_scope();
5550 }
5551
5552 if (s->is_eval_scope()) {
5553 // Loop up the context chain. There is no frame effect so it is
5554 // safe to use raw labels here.
5555 Label next, fast;
5556 if (!context.is(tmp.reg())) {
5557 __ movq(tmp.reg(), context);
5558 }
5559 // Load map for comparison into register, outside loop.
5560 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
5561 __ bind(&next);
5562 // Terminate at global context.
5563 __ cmpq(kScratchRegister, FieldOperand(tmp.reg(), HeapObject::kMapOffset));
5564 __ j(equal, &fast);
5565 // Check that extension is NULL.
5566 __ cmpq(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
5567 slow->Branch(not_equal);
5568 // Load next context in chain.
5569 __ movq(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
5570 __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5571 __ jmp(&next);
5572 __ bind(&fast);
5573 }
5574 tmp.Unuse();
5575
5576 // All extension objects were empty and it is safe to use a global
5577 // load IC call.
5578 LoadGlobal();
5579 frame_->Push(slot->var()->name());
5580 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
5581 ? RelocInfo::CODE_TARGET
5582 : RelocInfo::CODE_TARGET_CONTEXT;
5583 Result answer = frame_->CallLoadIC(mode);
5584 // A test rax instruction following the call signals that the inobject
5585 // property case was inlined. Ensure that there is not a test rax
5586 // instruction here.
5587 masm_->nop();
5588 // Discard the global object. The result is in answer.
5589 frame_->Drop();
5590 return answer;
5591}
5592
5593
Kristian Monsen25f61362010-05-21 11:50:48 +01005594void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
5595 TypeofState typeof_state,
5596 Result* result,
5597 JumpTarget* slow,
5598 JumpTarget* done) {
5599 // Generate fast-case code for variables that might be shadowed by
5600 // eval-introduced variables. Eval is used a lot without
5601 // introducing variables. In those cases, we do not want to
5602 // perform a runtime call for all variables in the scope
5603 // containing the eval.
5604 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
5605 *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
5606 done->Jump(result);
5607
5608 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
5609 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
5610 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
5611 if (potential_slot != NULL) {
5612 // Generate fast case for locals that rewrite to slots.
5613 // Allocate a fresh register to use as a temp in
5614 // ContextSlotOperandCheckExtensions and to hold the result
5615 // value.
5616 *result = allocator_->Allocate();
5617 ASSERT(result->is_valid());
5618 __ movq(result->reg(),
5619 ContextSlotOperandCheckExtensions(potential_slot,
5620 *result,
5621 slow));
5622 if (potential_slot->var()->mode() == Variable::CONST) {
5623 __ CompareRoot(result->reg(), Heap::kTheHoleValueRootIndex);
5624 done->Branch(not_equal, result);
5625 __ LoadRoot(result->reg(), Heap::kUndefinedValueRootIndex);
5626 }
5627 done->Jump(result);
5628 } else if (rewrite != NULL) {
5629 // Generate fast case for argument loads.
5630 Property* property = rewrite->AsProperty();
5631 if (property != NULL) {
5632 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
5633 Literal* key_literal = property->key()->AsLiteral();
5634 if (obj_proxy != NULL &&
5635 key_literal != NULL &&
5636 obj_proxy->IsArguments() &&
5637 key_literal->handle()->IsSmi()) {
5638 // Load arguments object if there are no eval-introduced
5639 // variables. Then load the argument from the arguments
5640 // object using keyed load.
5641 Result arguments = allocator()->Allocate();
5642 ASSERT(arguments.is_valid());
5643 __ movq(arguments.reg(),
5644 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
5645 arguments,
5646 slow));
5647 frame_->Push(&arguments);
5648 frame_->Push(key_literal->handle());
5649 *result = EmitKeyedLoad(false);
5650 frame_->Drop(2); // Drop key and receiver.
5651 done->Jump(result);
5652 }
5653 }
5654 }
5655 }
5656}
5657
5658
Steve Blocka7e24c12009-10-30 11:49:00 +00005659void CodeGenerator::LoadGlobal() {
5660 if (in_spilled_code()) {
5661 frame_->EmitPush(GlobalObject());
5662 } else {
5663 Result temp = allocator_->Allocate();
5664 __ movq(temp.reg(), GlobalObject());
5665 frame_->Push(&temp);
5666 }
5667}
5668
5669
5670void CodeGenerator::LoadGlobalReceiver() {
5671 Result temp = allocator_->Allocate();
5672 Register reg = temp.reg();
5673 __ movq(reg, GlobalObject());
5674 __ movq(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
5675 frame_->Push(&temp);
5676}
5677
5678
Andrei Popescu31002712010-02-23 13:46:05 +00005679ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
5680 if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
5681 ASSERT(scope()->arguments_shadow() != NULL);
Steve Blocka7e24c12009-10-30 11:49:00 +00005682 // We don't want to do lazy arguments allocation for functions that
5683 // have heap-allocated contexts, because it interferes with the
5684 // uninitialized const tracking in the context objects.
Andrei Popescu31002712010-02-23 13:46:05 +00005685 return (scope()->num_heap_slots() > 0)
Steve Blocka7e24c12009-10-30 11:49:00 +00005686 ? EAGER_ARGUMENTS_ALLOCATION
5687 : LAZY_ARGUMENTS_ALLOCATION;
5688}
5689
5690
5691Result CodeGenerator::StoreArgumentsObject(bool initial) {
5692 ArgumentsAllocationMode mode = ArgumentsMode();
5693 ASSERT(mode != NO_ARGUMENTS_ALLOCATION);
5694
5695 Comment cmnt(masm_, "[ store arguments object");
5696 if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
5697 // When using lazy arguments allocation, we store the hole value
5698 // as a sentinel indicating that the arguments object hasn't been
5699 // allocated yet.
5700 frame_->Push(Factory::the_hole_value());
5701 } else {
5702 ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
5703 frame_->PushFunction();
5704 frame_->PushReceiverSlotAddress();
Andrei Popescu31002712010-02-23 13:46:05 +00005705 frame_->Push(Smi::FromInt(scope()->num_parameters()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005706 Result result = frame_->CallStub(&stub, 3);
5707 frame_->Push(&result);
5708 }
5709
Leon Clarkee46be812010-01-19 14:06:41 +00005710
Andrei Popescu31002712010-02-23 13:46:05 +00005711 Variable* arguments = scope()->arguments()->var();
5712 Variable* shadow = scope()->arguments_shadow()->var();
Leon Clarkee46be812010-01-19 14:06:41 +00005713 ASSERT(arguments != NULL && arguments->slot() != NULL);
5714 ASSERT(shadow != NULL && shadow->slot() != NULL);
5715 JumpTarget done;
5716 bool skip_arguments = false;
5717 if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
5718 // We have to skip storing into the arguments slot if it has
5719  // already been written to. This can happen if the function
5720 // has a local variable named 'arguments'.
Andrei Popescu31002712010-02-23 13:46:05 +00005721 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
Leon Clarkee46be812010-01-19 14:06:41 +00005722 Result probe = frame_->Pop();
5723 if (probe.is_constant()) {
5724 // We have to skip updating the arguments object if it has been
5725 // assigned a proper value.
5726 skip_arguments = !probe.handle()->IsTheHole();
5727 } else {
5728 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
5729 probe.Unuse();
5730 done.Branch(not_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00005731 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005732 }
Leon Clarkee46be812010-01-19 14:06:41 +00005733 if (!skip_arguments) {
5734 StoreToSlot(arguments->slot(), NOT_CONST_INIT);
5735 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
5736 }
5737 StoreToSlot(shadow->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00005738 return frame_->Pop();
5739}
5740
5741
Steve Blockd0582a62009-12-15 09:54:21 +00005742void CodeGenerator::LoadTypeofExpression(Expression* expr) {
5743 // Special handling of identifiers as subexpressions of typeof.
5744 Variable* variable = expr->AsVariableProxy()->AsVariable();
Steve Blocka7e24c12009-10-30 11:49:00 +00005745 if (variable != NULL && !variable->is_this() && variable->is_global()) {
Steve Blockd0582a62009-12-15 09:54:21 +00005746 // For a global variable we build the property reference
5747 // <global>.<variable> and perform a (regular non-contextual) property
5748 // load to make sure we do not get reference errors.
Steve Blocka7e24c12009-10-30 11:49:00 +00005749 Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
5750 Literal key(variable->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00005751 Property property(&global, &key, RelocInfo::kNoPosition);
Steve Blockd0582a62009-12-15 09:54:21 +00005752 Reference ref(this, &property);
5753 ref.GetValue();
5754 } else if (variable != NULL && variable->slot() != NULL) {
5755 // For a variable that rewrites to a slot, we signal it is the immediate
5756 // subexpression of a typeof.
5757 LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00005758 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00005759 // Anything else can be handled normally.
5760 Load(expr);
Steve Blocka7e24c12009-10-30 11:49:00 +00005761 }
5762}
5763
5764
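// Returns false only when the value provably cannot be NaN: smi and int32
// values never are, and a constant is NaN only if it is a heap number whose
// stored double is NaN. Any non-constant value that might be a heap number
// is conservatively treated as possibly NaN.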
Steve Block6ded16b2010-05-10 14:33:55 +01005765static bool CouldBeNaN(const Result& result) {
5766 if (result.type_info().IsSmi()) return false;
5767 if (result.type_info().IsInteger32()) return false;
5768 if (!result.is_constant()) return true;
5769 if (!result.handle()->IsHeapNumber()) return false;
5770 return isnan(HeapNumber::cast(*result.handle())->value());
5771}
5772
5773
5774// Convert from signed to unsigned comparison to match the way EFLAGS are set
5775// by FPU and XMM compare instructions.
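// (ucomisd/comisd set CF and ZF the way an unsigned integer compare would,
// so below/above style conditions must be used instead of less/greater.)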
5776static Condition DoubleCondition(Condition cc) {
5777 switch (cc) {
5778 case less: return below;
5779 case equal: return equal;
5780 case less_equal: return below_equal;
5781 case greater: return above;
5782 case greater_equal: return above_equal;
5783 default: UNREACHABLE();
5784 }
5785 UNREACHABLE();
5786 return equal;
5787}
5788
5789
Andrei Popescu402d9372010-02-26 13:31:12 +00005790void CodeGenerator::Comparison(AstNode* node,
5791 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00005792 bool strict,
5793 ControlDestination* dest) {
5794 // Strict only makes sense for equality comparisons.
5795 ASSERT(!strict || cc == equal);
5796
5797 Result left_side;
5798 Result right_side;
5799 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
5800 if (cc == greater || cc == less_equal) {
5801 cc = ReverseCondition(cc);
5802 left_side = frame_->Pop();
5803 right_side = frame_->Pop();
5804 } else {
5805 right_side = frame_->Pop();
5806 left_side = frame_->Pop();
5807 }
5808 ASSERT(cc == less || cc == equal || cc == greater_equal);
5809
5810 // If either side is a constant smi, optimize the comparison.
Steve Block6ded16b2010-05-10 14:33:55 +01005811 bool left_side_constant_smi = false;
5812 bool left_side_constant_null = false;
5813 bool left_side_constant_1_char_string = false;
5814 if (left_side.is_constant()) {
5815 left_side_constant_smi = left_side.handle()->IsSmi();
5816 left_side_constant_null = left_side.handle()->IsNull();
5817 left_side_constant_1_char_string =
5818 (left_side.handle()->IsString() &&
5819 String::cast(*left_side.handle())->length() == 1 &&
5820 String::cast(*left_side.handle())->IsAsciiRepresentation());
5821 }
5822 bool right_side_constant_smi = false;
5823 bool right_side_constant_null = false;
5824 bool right_side_constant_1_char_string = false;
5825 if (right_side.is_constant()) {
5826 right_side_constant_smi = right_side.handle()->IsSmi();
5827 right_side_constant_null = right_side.handle()->IsNull();
5828 right_side_constant_1_char_string =
5829 (right_side.handle()->IsString() &&
5830 String::cast(*right_side.handle())->length() == 1 &&
5831 String::cast(*right_side.handle())->IsAsciiRepresentation());
5832 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005833
5834 if (left_side_constant_smi || right_side_constant_smi) {
5835 if (left_side_constant_smi && right_side_constant_smi) {
5836 // Trivial case, comparing two constants.
5837 int left_value = Smi::cast(*left_side.handle())->value();
5838 int right_value = Smi::cast(*right_side.handle())->value();
5839 switch (cc) {
5840 case less:
5841 dest->Goto(left_value < right_value);
5842 break;
5843 case equal:
5844 dest->Goto(left_value == right_value);
5845 break;
5846 case greater_equal:
5847 dest->Goto(left_value >= right_value);
5848 break;
5849 default:
5850 UNREACHABLE();
5851 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005852 } else {
5853 // Only one side is a constant Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00005854 // If left side is a constant Smi, reverse the operands.
5855 // Since one side is a constant Smi, conversion order does not matter.
5856 if (left_side_constant_smi) {
5857 Result temp = left_side;
5858 left_side = right_side;
5859 right_side = temp;
5860 cc = ReverseCondition(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01005861 // This may re-introduce greater or less_equal as the value of cc.
Steve Blocka7e24c12009-10-30 11:49:00 +00005862 // CompareStub and the inline code both support all values of cc.
5863 }
5864 // Implement comparison against a constant Smi, inlining the case
5865 // where both sides are Smis.
5866 left_side.ToRegister();
Andrei Popescu402d9372010-02-26 13:31:12 +00005867 Register left_reg = left_side.reg();
5868 Handle<Object> right_val = right_side.handle();
Steve Blocka7e24c12009-10-30 11:49:00 +00005869
5870 // Here we split control flow to the stub call and inlined cases
5871 // before finally splitting it to the control destination. We use
5872 // a jump target and branching to duplicate the virtual frame at
5873 // the first split. We manually handle the off-frame references
5874 // by reconstituting them on the non-fall-through path.
5875 JumpTarget is_smi;
Steve Blocka7e24c12009-10-30 11:49:00 +00005876
Kristian Monsen25f61362010-05-21 11:50:48 +01005877 if (left_side.is_smi()) {
5878 if (FLAG_debug_code) {
5879 __ AbortIfNotSmi(left_side.reg(), "Non-smi value inferred as smi.");
Andrei Popescu402d9372010-02-26 13:31:12 +00005880 }
Kristian Monsen25f61362010-05-21 11:50:48 +01005881 } else {
5882 Condition left_is_smi = masm_->CheckSmi(left_side.reg());
5883 is_smi.Branch(left_is_smi);
5884
5885 bool is_loop_condition = (node->AsExpression() != NULL) &&
5886 node->AsExpression()->is_loop_condition();
5887 if (!is_loop_condition && right_val->IsSmi()) {
5888 // Right side is a constant smi and left side has been checked
5889 // not to be a smi.
5890 JumpTarget not_number;
5891 __ Cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
5892 Factory::heap_number_map());
5893 not_number.Branch(not_equal, &left_side);
5894 __ movsd(xmm1,
5895 FieldOperand(left_reg, HeapNumber::kValueOffset));
5896 int value = Smi::cast(*right_val)->value();
5897 if (value == 0) {
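              // xorpd of a register with itself is the shortest way to load 0.0.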
5898 __ xorpd(xmm0, xmm0);
5899 } else {
5900 Result temp = allocator()->Allocate();
5901 __ movl(temp.reg(), Immediate(value));
5902 __ cvtlsi2sd(xmm0, temp.reg());
5903 temp.Unuse();
5904 }
5905 __ ucomisd(xmm1, xmm0);
5906 // Jump to builtin for NaN.
5907 not_number.Branch(parity_even, &left_side);
5908 left_side.Unuse();
5909 dest->true_target()->Branch(DoubleCondition(cc));
5910 dest->false_target()->Jump();
5911 not_number.Bind(&left_side);
5912 }
5913
5914      // Set up and call the compare stub.
5915 CompareStub stub(cc, strict, kCantBothBeNaN);
5916 Result result = frame_->CallStub(&stub, &left_side, &right_side);
5917 result.ToRegister();
5918 __ testq(result.reg(), result.reg());
5919 result.Unuse();
5920 dest->true_target()->Branch(cc);
Andrei Popescu402d9372010-02-26 13:31:12 +00005921 dest->false_target()->Jump();
Kristian Monsen25f61362010-05-21 11:50:48 +01005922
5923 is_smi.Bind();
Andrei Popescu402d9372010-02-26 13:31:12 +00005924 }
5925
Steve Blocka7e24c12009-10-30 11:49:00 +00005926 left_side = Result(left_reg);
5927 right_side = Result(right_val);
5928 // Test smi equality and comparison by signed int comparison.
5929 // Both sides are smis, so we can use an Immediate.
Steve Block3ce2e202009-11-05 08:53:23 +00005930 __ SmiCompare(left_side.reg(), Smi::cast(*right_side.handle()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005931 left_side.Unuse();
5932 right_side.Unuse();
5933 dest->Split(cc);
5934 }
5935 } else if (cc == equal &&
5936 (left_side_constant_null || right_side_constant_null)) {
5937 // To make null checks efficient, we check if either the left side or
5938 // the right side is the constant 'null'.
5939 // If so, we optimize the code by inlining a null check instead of
5940 // calling the (very) general runtime routine for checking equality.
5941 Result operand = left_side_constant_null ? right_side : left_side;
5942 right_side.Unuse();
5943 left_side.Unuse();
5944 operand.ToRegister();
5945 __ CompareRoot(operand.reg(), Heap::kNullValueRootIndex);
5946 if (strict) {
5947 operand.Unuse();
5948 dest->Split(equal);
5949 } else {
5950 // The 'null' value is only equal to 'undefined' if using non-strict
5951 // comparisons.
5952 dest->true_target()->Branch(equal);
5953 __ CompareRoot(operand.reg(), Heap::kUndefinedValueRootIndex);
5954 dest->true_target()->Branch(equal);
5955 Condition is_smi = masm_->CheckSmi(operand.reg());
5956 dest->false_target()->Branch(is_smi);
5957
5958 // It can be an undetectable object.
5959 // Use a scratch register in preference to spilling operand.reg().
5960 Result temp = allocator()->Allocate();
5961 ASSERT(temp.is_valid());
5962 __ movq(temp.reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00005963 FieldOperand(operand.reg(), HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005964 __ testb(FieldOperand(temp.reg(), Map::kBitFieldOffset),
5965 Immediate(1 << Map::kIsUndetectable));
5966 temp.Unuse();
5967 operand.Unuse();
5968 dest->Split(not_zero);
5969 }
Steve Block6ded16b2010-05-10 14:33:55 +01005970 } else if (left_side_constant_1_char_string ||
5971 right_side_constant_1_char_string) {
5972 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
5973 // Trivial case, comparing two constants.
5974 int left_value = String::cast(*left_side.handle())->Get(0);
5975 int right_value = String::cast(*right_side.handle())->Get(0);
5976 switch (cc) {
5977 case less:
5978 dest->Goto(left_value < right_value);
5979 break;
5980 case equal:
5981 dest->Goto(left_value == right_value);
5982 break;
5983 case greater_equal:
5984 dest->Goto(left_value >= right_value);
5985 break;
5986 default:
5987 UNREACHABLE();
5988 }
5989 } else {
5990 // Only one side is a constant 1 character string.
5991 // If left side is a constant 1-character string, reverse the operands.
5992 // Since one side is a constant string, conversion order does not matter.
5993 if (left_side_constant_1_char_string) {
5994 Result temp = left_side;
5995 left_side = right_side;
5996 right_side = temp;
5997 cc = ReverseCondition(cc);
5998 // This may reintroduce greater or less_equal as the value of cc.
5999 // CompareStub and the inline code both support all values of cc.
6000 }
6001 // Implement comparison against a constant string, inlining the case
6002 // where both sides are strings.
6003 left_side.ToRegister();
6004
6005 // Here we split control flow to the stub call and inlined cases
6006 // before finally splitting it to the control destination. We use
6007 // a jump target and branching to duplicate the virtual frame at
6008 // the first split. We manually handle the off-frame references
6009 // by reconstituting them on the non-fall-through path.
6010 JumpTarget is_not_string, is_string;
6011 Register left_reg = left_side.reg();
6012 Handle<Object> right_val = right_side.handle();
6013 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
6014 Condition is_smi = masm()->CheckSmi(left_reg);
6015 is_not_string.Branch(is_smi, &left_side);
6016 Result temp = allocator_->Allocate();
6017 ASSERT(temp.is_valid());
6018 __ movq(temp.reg(),
6019 FieldOperand(left_reg, HeapObject::kMapOffset));
6020 __ movzxbl(temp.reg(),
6021 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
6022 // If we are testing for equality then make use of the symbol shortcut.
6023 // Check if the left hand side has the same type as the right hand
6024 // side (which is always a symbol).
6025 if (cc == equal) {
6026 Label not_a_symbol;
6027 ASSERT(kSymbolTag != 0);
6028 // Ensure that no non-strings have the symbol bit set.
6029 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
6030 __ testb(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
6031 __ j(zero, &not_a_symbol);
6032 // They are symbols, so do identity compare.
6033 __ Cmp(left_reg, right_side.handle());
6034 dest->true_target()->Branch(equal);
6035 dest->false_target()->Branch(not_equal);
6036 __ bind(&not_a_symbol);
6037 }
6038 // Call the compare stub if the left side is not a flat ascii string.
6039 __ andb(temp.reg(),
6040 Immediate(kIsNotStringMask |
6041 kStringRepresentationMask |
6042 kStringEncodingMask));
6043 __ cmpb(temp.reg(),
6044 Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
6045 temp.Unuse();
6046 is_string.Branch(equal, &left_side);
6047
6048      // Set up and call the compare stub.
6049 is_not_string.Bind(&left_side);
6050 CompareStub stub(cc, strict, kCantBothBeNaN);
6051 Result result = frame_->CallStub(&stub, &left_side, &right_side);
6052 result.ToRegister();
6053 __ testq(result.reg(), result.reg());
6054 result.Unuse();
6055 dest->true_target()->Branch(cc);
6056 dest->false_target()->Jump();
6057
6058 is_string.Bind(&left_side);
6059 // left_side is a sequential ASCII string.
6060 ASSERT(left_side.reg().is(left_reg));
6061 right_side = Result(right_val);
6062 Result temp2 = allocator_->Allocate();
6063 ASSERT(temp2.is_valid());
6064 // Test string equality and comparison.
6065 if (cc == equal) {
6066 Label comparison_done;
6067 __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
6068 Smi::FromInt(1));
6069 __ j(not_equal, &comparison_done);
6070 uint8_t char_value =
6071 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
6072 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
6073 Immediate(char_value));
6074 __ bind(&comparison_done);
6075 } else {
6076 __ movq(temp2.reg(),
6077 FieldOperand(left_side.reg(), String::kLengthOffset));
6078 __ SmiSubConstant(temp2.reg(), temp2.reg(), Smi::FromInt(1));
6079 Label comparison;
6080 // If the length is 0 then the subtraction gave -1 which compares less
6081 // than any character.
6082 __ j(negative, &comparison);
6083 // Otherwise load the first character.
6084 __ movzxbl(temp2.reg(),
6085 FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize));
6086 __ bind(&comparison);
6087 // Compare the first character of the string with the
6088 // constant 1-character string.
6089 uint8_t char_value =
6090 static_cast<uint8_t>(String::cast(*right_side.handle())->Get(0));
6091 __ cmpb(temp2.reg(), Immediate(char_value));
6092 Label characters_were_different;
6093 __ j(not_equal, &characters_were_different);
6094 // If the first character is the same then the long string sorts after
6095 // the short one.
6096 __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
Kristian Monsen25f61362010-05-21 11:50:48 +01006097 Smi::FromInt(1));
Steve Block6ded16b2010-05-10 14:33:55 +01006098 __ bind(&characters_were_different);
6099 }
6100 temp2.Unuse();
6101 left_side.Unuse();
6102 right_side.Unuse();
6103 dest->Split(cc);
6104 }
6105 } else {
6106 // Neither side is a constant Smi, constant 1-char string, or constant null.
Steve Blocka7e24c12009-10-30 11:49:00 +00006107 // If either side is a non-smi constant, skip the smi check.
6108 bool known_non_smi =
6109 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
Steve Block6ded16b2010-05-10 14:33:55 +01006110 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
6111 left_side.type_info().IsDouble() ||
6112 right_side.type_info().IsDouble();
6113
6114 NaNInformation nan_info =
6115 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
6116 kBothCouldBeNaN :
6117 kCantBothBeNaN;
6118
6119    // Inline number comparison, handling any combination of smis and heap
6120    // numbers, only if:
6121    //  - the code is in a loop,
6122    //  - the compare operation is different from equal, and
6123    //  - the compare is not a for-loop condition.
6124    // Equality is excluded because it is most likely performed on smis
6125    // (not heap numbers) and the code for comparing smis is inlined
6126    // separately. The same reasoning applies to for-loop conditions, which
6127    // are also most likely smi comparisons.
6128 bool is_loop_condition = (node->AsExpression() != NULL)
6129 && node->AsExpression()->is_loop_condition();
6130 bool inline_number_compare =
6131 loop_nesting() > 0 && cc != equal && !is_loop_condition;
6132
Steve Blocka7e24c12009-10-30 11:49:00 +00006133 left_side.ToRegister();
6134 right_side.ToRegister();
6135
6136 if (known_non_smi) {
Steve Block6ded16b2010-05-10 14:33:55 +01006137 // Inlined equality check:
6138 // If at least one of the objects is not NaN, then if the objects
6139 // are identical, they are equal.
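      // (This shortcut would be unsound if both sides could be NaN, since NaN
      // compares unequal even to itself.)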
6140 if (nan_info == kCantBothBeNaN && cc == equal) {
6141 __ cmpq(left_side.reg(), right_side.reg());
6142 dest->true_target()->Branch(equal);
6143 }
6144
6145 // Inlined number comparison:
6146 if (inline_number_compare) {
6147 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
6148 }
6149
6150 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
Steve Blocka7e24c12009-10-30 11:49:00 +00006151 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
Steve Block6ded16b2010-05-10 14:33:55 +01006152 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flag.
Steve Blocka7e24c12009-10-30 11:49:00 +00006153 answer.Unuse();
6154 dest->Split(cc);
6155 } else {
6156 // Here we split control flow to the stub call and inlined cases
6157 // before finally splitting it to the control destination. We use
6158 // a jump target and branching to duplicate the virtual frame at
6159 // the first split. We manually handle the off-frame references
6160 // by reconstituting them on the non-fall-through path.
6161 JumpTarget is_smi;
6162 Register left_reg = left_side.reg();
6163 Register right_reg = right_side.reg();
6164
6165 Condition both_smi = masm_->CheckBothSmi(left_reg, right_reg);
6166 is_smi.Branch(both_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01006167
6168 // Inline the equality check if both operands can't be a NaN. If both
6169 // objects are the same they are equal.
6170 if (nan_info == kCantBothBeNaN && cc == equal) {
6171 __ cmpq(left_side.reg(), right_side.reg());
6172 dest->true_target()->Branch(equal);
6173 }
6174
6175 // Inlined number comparison:
6176 if (inline_number_compare) {
6177 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
6178 }
6179
6180 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
Steve Blocka7e24c12009-10-30 11:49:00 +00006181 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
Steve Block6ded16b2010-05-10 14:33:55 +01006182 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flags.
Steve Blocka7e24c12009-10-30 11:49:00 +00006183 answer.Unuse();
6184 dest->true_target()->Branch(cc);
6185 dest->false_target()->Jump();
6186
6187 is_smi.Bind();
6188 left_side = Result(left_reg);
6189 right_side = Result(right_reg);
Steve Block3ce2e202009-11-05 08:53:23 +00006190 __ SmiCompare(left_side.reg(), right_side.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00006191 right_side.Unuse();
6192 left_side.Unuse();
6193 dest->Split(cc);
6194 }
6195 }
6196}
6197
6198
Steve Block6ded16b2010-05-10 14:33:55 +01006199// Load a comparison operand into an XMM register. Jump to the not_numbers jump
6200 // target, passing the left and right results, if the operand is not a number.
6201static void LoadComparisonOperand(MacroAssembler* masm_,
6202 Result* operand,
6203 XMMRegister xmm_reg,
6204 Result* left_side,
6205 Result* right_side,
6206 JumpTarget* not_numbers) {
6207 Label done;
6208 if (operand->type_info().IsDouble()) {
6209 // Operand is known to be a heap number, just load it.
6210 __ movsd(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
6211 } else if (operand->type_info().IsSmi()) {
6212 // Operand is known to be a smi. Convert it to double and keep the original
6213 // smi.
6214 __ SmiToInteger32(kScratchRegister, operand->reg());
6215 __ cvtlsi2sd(xmm_reg, kScratchRegister);
6216 } else {
6217 // Operand type not known, check for smi or heap number.
6218 Label smi;
6219 __ JumpIfSmi(operand->reg(), &smi);
6220 if (!operand->type_info().IsNumber()) {
6221 __ LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
6222 __ cmpq(FieldOperand(operand->reg(), HeapObject::kMapOffset),
6223 kScratchRegister);
6224 not_numbers->Branch(not_equal, left_side, right_side, taken);
6225 }
6226 __ movsd(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
6227 __ jmp(&done);
6228
6229 __ bind(&smi);
6230    // Convert the smi to a double and keep the original smi.
6231 __ SmiToInteger32(kScratchRegister, operand->reg());
6232 __ cvtlsi2sd(xmm_reg, kScratchRegister);
6233 __ jmp(&done);
6234 }
6235 __ bind(&done);
6236}
6237
6238
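// Emit an inlined floating point comparison of the two register operands.
// Control goes to the not_numbers target, with both operands preserved, if
// either operand is not a number or if the comparison involves a NaN.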
6239void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
6240 Result* right_side,
6241 Condition cc,
6242 ControlDestination* dest) {
6243 ASSERT(left_side->is_register());
6244 ASSERT(right_side->is_register());
6245
6246 JumpTarget not_numbers;
6247 // Load left and right operand into registers xmm0 and xmm1 and compare.
6248 LoadComparisonOperand(masm_, left_side, xmm0, left_side, right_side,
6249 &not_numbers);
6250 LoadComparisonOperand(masm_, right_side, xmm1, left_side, right_side,
6251 &not_numbers);
6252 __ comisd(xmm0, xmm1);
6253 // Bail out if a NaN is involved.
6254 not_numbers.Branch(parity_even, left_side, right_side);
6255
6256 // Split to destination targets based on comparison.
6257 left_side->Unuse();
6258 right_side->Unuse();
6259 dest->true_target()->Branch(DoubleCondition(cc));
6260 dest->false_target()->Jump();
6261
6262 not_numbers.Bind(left_side, right_side);
6263}
6264
6265
Steve Blocka7e24c12009-10-30 11:49:00 +00006266class DeferredInlineBinaryOperation: public DeferredCode {
6267 public:
6268 DeferredInlineBinaryOperation(Token::Value op,
6269 Register dst,
6270 Register left,
6271 Register right,
6272 OverwriteMode mode)
6273 : op_(op), dst_(dst), left_(left), right_(right), mode_(mode) {
6274 set_comment("[ DeferredInlineBinaryOperation");
6275 }
6276
6277 virtual void Generate();
6278
6279 private:
6280 Token::Value op_;
6281 Register dst_;
6282 Register left_;
6283 Register right_;
6284 OverwriteMode mode_;
6285};
6286
6287
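// Deferred (non-smi) code for the inlined binary operations. For ADD, SUB,
// MUL and DIV it first tries to perform the operation on heap numbers using
// SSE2, allocating the result heap number according to the overwrite mode,
// and falls back to the generic binary operation stub for everything else.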
6288void DeferredInlineBinaryOperation::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01006289 Label done;
6290 if ((op_ == Token::ADD)
6291      || (op_ == Token::SUB)
6292 || (op_ == Token::MUL)
6293 || (op_ == Token::DIV)) {
6294 Label call_runtime;
6295 Label left_smi, right_smi, load_right, do_op;
6296 __ JumpIfSmi(left_, &left_smi);
6297 __ CompareRoot(FieldOperand(left_, HeapObject::kMapOffset),
6298 Heap::kHeapNumberMapRootIndex);
6299 __ j(not_equal, &call_runtime);
6300 __ movsd(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
6301 if (mode_ == OVERWRITE_LEFT) {
6302 __ movq(dst_, left_);
6303 }
6304 __ jmp(&load_right);
6305
6306 __ bind(&left_smi);
6307 __ SmiToInteger32(left_, left_);
6308 __ cvtlsi2sd(xmm0, left_);
6309 __ Integer32ToSmi(left_, left_);
6310 if (mode_ == OVERWRITE_LEFT) {
6311 Label alloc_failure;
6312 __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
6313 }
6314
6315 __ bind(&load_right);
6316 __ JumpIfSmi(right_, &right_smi);
6317 __ CompareRoot(FieldOperand(right_, HeapObject::kMapOffset),
6318 Heap::kHeapNumberMapRootIndex);
6319 __ j(not_equal, &call_runtime);
6320 __ movsd(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
6321 if (mode_ == OVERWRITE_RIGHT) {
6322 __ movq(dst_, right_);
6323 } else if (mode_ == NO_OVERWRITE) {
6324 Label alloc_failure;
6325 __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
6326 }
6327 __ jmp(&do_op);
6328
6329 __ bind(&right_smi);
6330 __ SmiToInteger32(right_, right_);
6331 __ cvtlsi2sd(xmm1, right_);
6332 __ Integer32ToSmi(right_, right_);
6333 if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
6334 Label alloc_failure;
6335 __ AllocateHeapNumber(dst_, no_reg, &call_runtime);
6336 }
6337
6338 __ bind(&do_op);
6339 switch (op_) {
6340 case Token::ADD: __ addsd(xmm0, xmm1); break;
6341 case Token::SUB: __ subsd(xmm0, xmm1); break;
6342 case Token::MUL: __ mulsd(xmm0, xmm1); break;
6343 case Token::DIV: __ divsd(xmm0, xmm1); break;
6344 default: UNREACHABLE();
6345 }
6346 __ movsd(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
6347 __ jmp(&done);
6348
6349 __ bind(&call_runtime);
6350 }
Steve Blockd0582a62009-12-15 09:54:21 +00006351 GenericBinaryOpStub stub(op_, mode_, NO_SMI_CODE_IN_STUB);
6352 stub.GenerateCall(masm_, left_, right_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006353 if (!dst_.is(rax)) __ movq(dst_, rax);
Steve Block6ded16b2010-05-10 14:33:55 +01006354 __ bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00006355}
6356
6357
Steve Block6ded16b2010-05-10 14:33:55 +01006358static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
6359 Token::Value op,
6360 const Result& right,
6361 const Result& left) {
6362 // Set TypeInfo of result according to the operation performed.
6363 // We rely on the fact that smis have a 32 bit payload on x64.
6364 STATIC_ASSERT(kSmiValueSize == 32);
6365 switch (op) {
6366 case Token::COMMA:
6367 return right.type_info();
6368 case Token::OR:
6369 case Token::AND:
6370 // Result type can be either of the two input types.
6371 return operands_type;
6372 case Token::BIT_OR:
6373 case Token::BIT_XOR:
6374 case Token::BIT_AND:
6375 // Result is always a smi.
6376 return TypeInfo::Smi();
6377 case Token::SAR:
6378 case Token::SHL:
6379 // Result is always a smi.
6380 return TypeInfo::Smi();
6381 case Token::SHR:
6382 // Result of x >>> y is always a smi if masked y >= 1, otherwise a number.
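    // A nonzero shift leaves at most 31 significant bits, which always fit in
    // the 32-bit smi payload; a zero shift can produce values up to 2^32 - 1,
    // which may require a heap number.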
6383 return (right.is_constant() && right.handle()->IsSmi()
6384 && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
6385 ? TypeInfo::Smi()
6386 : TypeInfo::Number();
6387 case Token::ADD:
6388 if (operands_type.IsNumber()) {
6389 return TypeInfo::Number();
6390 } else if (left.type_info().IsString() || right.type_info().IsString()) {
6391 return TypeInfo::String();
6392 } else {
6393 return TypeInfo::Unknown();
6394 }
6395 case Token::SUB:
6396 case Token::MUL:
6397 case Token::DIV:
6398 case Token::MOD:
6399 // Result is always a number.
6400 return TypeInfo::Number();
6401 default:
6402 UNREACHABLE();
6403 }
6404 UNREACHABLE();
6405 return TypeInfo::Unknown();
6406}
6407
6408
6409void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
Steve Blocka7e24c12009-10-30 11:49:00 +00006410 OverwriteMode overwrite_mode) {
6411 Comment cmnt(masm_, "[ BinaryOperation");
Steve Block6ded16b2010-05-10 14:33:55 +01006412 Token::Value op = expr->op();
Steve Blocka7e24c12009-10-30 11:49:00 +00006413 Comment cmnt_token(masm_, Token::String(op));
6414
6415 if (op == Token::COMMA) {
6416 // Simply discard left value.
6417 frame_->Nip(1);
6418 return;
6419 }
6420
Steve Blocka7e24c12009-10-30 11:49:00 +00006421 Result right = frame_->Pop();
6422 Result left = frame_->Pop();
6423
6424 if (op == Token::ADD) {
Steve Block6ded16b2010-05-10 14:33:55 +01006425 const bool left_is_string = left.type_info().IsString();
6426 const bool right_is_string = right.type_info().IsString();
6427 // Make sure constant strings have string type info.
6428 ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
6429 left_is_string);
6430 ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
6431 right_is_string);
Steve Blocka7e24c12009-10-30 11:49:00 +00006432 if (left_is_string || right_is_string) {
6433 frame_->Push(&left);
6434 frame_->Push(&right);
6435 Result answer;
6436 if (left_is_string) {
6437 if (right_is_string) {
Steve Block6ded16b2010-05-10 14:33:55 +01006438 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
6439 answer = frame_->CallStub(&stub, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00006440 } else {
6441 answer =
6442 frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
6443 }
6444 } else if (right_is_string) {
6445 answer =
6446 frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
6447 }
Steve Block6ded16b2010-05-10 14:33:55 +01006448 answer.set_type_info(TypeInfo::String());
Steve Blocka7e24c12009-10-30 11:49:00 +00006449 frame_->Push(&answer);
6450 return;
6451 }
6452 // Neither operand is known to be a string.
6453 }
6454
Andrei Popescu402d9372010-02-26 13:31:12 +00006455 bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
6456 bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
6457 bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
6458 bool right_is_non_smi_constant =
6459 right.is_constant() && !right.handle()->IsSmi();
Steve Blocka7e24c12009-10-30 11:49:00 +00006460
Andrei Popescu402d9372010-02-26 13:31:12 +00006461 if (left_is_smi_constant && right_is_smi_constant) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006462 // Compute the constant result at compile time, and leave it on the frame.
6463 int left_int = Smi::cast(*left.handle())->value();
6464 int right_int = Smi::cast(*right.handle())->value();
6465 if (FoldConstantSmis(op, left_int, right_int)) return;
6466 }
6467
Andrei Popescu402d9372010-02-26 13:31:12 +00006468 // Get number type of left and right sub-expressions.
Steve Block6ded16b2010-05-10 14:33:55 +01006469 TypeInfo operands_type =
6470 TypeInfo::Combine(left.type_info(), right.type_info());
6471
6472 TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);
Andrei Popescu402d9372010-02-26 13:31:12 +00006473
Leon Clarked91b9f72010-01-27 17:25:45 +00006474 Result answer;
Andrei Popescu402d9372010-02-26 13:31:12 +00006475 if (left_is_non_smi_constant || right_is_non_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01006476 // Go straight to the slow case, with no smi code.
Andrei Popescu402d9372010-02-26 13:31:12 +00006477 GenericBinaryOpStub stub(op,
6478 overwrite_mode,
6479 NO_SMI_CODE_IN_STUB,
6480 operands_type);
Leon Clarke4515c472010-02-03 11:58:03 +00006481 answer = stub.GenerateCall(masm_, frame_, &left, &right);
Andrei Popescu402d9372010-02-26 13:31:12 +00006482 } else if (right_is_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01006483 answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
6484 false, overwrite_mode);
Andrei Popescu402d9372010-02-26 13:31:12 +00006485 } else if (left_is_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01006486 answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
6487 true, overwrite_mode);
Leon Clarked91b9f72010-01-27 17:25:45 +00006488 } else {
6489 // Set the flags based on the operation, type and loop nesting level.
6490 // Bit operations always assume they likely operate on Smis. Still only
6491 // generate the inline Smi check code if this operation is part of a loop.
6492 // For all other operations only inline the Smi check code for likely smis
6493 // if the operation is part of a loop.
Steve Block6ded16b2010-05-10 14:33:55 +01006494 if (loop_nesting() > 0 &&
6495 (Token::IsBitOp(op) ||
6496 operands_type.IsInteger32() ||
6497 expr->type()->IsLikelySmi())) {
6498 answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
Leon Clarked91b9f72010-01-27 17:25:45 +00006499 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00006500 GenericBinaryOpStub stub(op,
6501 overwrite_mode,
6502 NO_GENERIC_BINARY_FLAGS,
6503 operands_type);
Leon Clarke4515c472010-02-03 11:58:03 +00006504 answer = stub.GenerateCall(masm_, frame_, &left, &right);
Steve Blocka7e24c12009-10-30 11:49:00 +00006505 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006506 }
Andrei Popescu402d9372010-02-26 13:31:12 +00006507
Steve Block6ded16b2010-05-10 14:33:55 +01006508 answer.set_type_info(result_type);
Leon Clarked91b9f72010-01-27 17:25:45 +00006509 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00006510}
6511
6512
6513// Emit a LoadIC call to get the value from receiver and leave it in
6514// dst. The receiver register is restored after the call.
6515class DeferredReferenceGetNamedValue: public DeferredCode {
6516 public:
6517 DeferredReferenceGetNamedValue(Register dst,
6518 Register receiver,
6519 Handle<String> name)
6520 : dst_(dst), receiver_(receiver), name_(name) {
6521 set_comment("[ DeferredReferenceGetNamedValue");
6522 }
6523
6524 virtual void Generate();
6525
6526 Label* patch_site() { return &patch_site_; }
6527
6528 private:
6529 Label patch_site_;
6530 Register dst_;
6531 Register receiver_;
6532 Handle<String> name_;
6533};
6534
6535
6536void DeferredReferenceGetNamedValue::Generate() {
6537 __ push(receiver_);
6538 __ Move(rcx, name_);
6539 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
6540 __ Call(ic, RelocInfo::CODE_TARGET);
6541 // The call must be followed by a test rax instruction to indicate
6542 // that the inobject property case was inlined.
6543 //
6544 // Store the delta to the map check instruction here in the test
6545 // instruction. Use masm_-> instead of the __ macro since the
6546 // latter can't return a value.
6547 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
6548 // Here we use masm_-> instead of the __ macro because this is the
6549 // instruction that gets patched and coverage code gets in the way.
6550 masm_->testl(rax, Immediate(-delta_to_patch_site));
6551 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
6552
6553 if (!dst_.is(rax)) __ movq(dst_, rax);
6554 __ pop(receiver_);
6555}
6556
6557
6558void DeferredInlineSmiAdd::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00006559 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
6560 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006561 if (!dst_.is(rax)) __ movq(dst_, rax);
6562}
6563
6564
6565void DeferredInlineSmiAddReversed::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00006566 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
6567 igostub.GenerateCall(masm_, value_, dst_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006568 if (!dst_.is(rax)) __ movq(dst_, rax);
6569}
6570
6571
6572void DeferredInlineSmiSub::Generate() {
Steve Blockd0582a62009-12-15 09:54:21 +00006573 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, NO_SMI_CODE_IN_STUB);
6574 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006575 if (!dst_.is(rax)) __ movq(dst_, rax);
6576}
6577
6578
6579void DeferredInlineSmiOperation::Generate() {
Steve Blocka7e24c12009-10-30 11:49:00 +00006580 // For mod we don't generate all the Smi code inline.
6581 GenericBinaryOpStub stub(
6582 op_,
6583 overwrite_mode_,
Steve Blockd0582a62009-12-15 09:54:21 +00006584 (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB);
6585 stub.GenerateCall(masm_, src_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00006586 if (!dst_.is(rax)) __ movq(dst_, rax);
6587}
6588
6589
Steve Block6ded16b2010-05-10 14:33:55 +01006590void DeferredInlineSmiOperationReversed::Generate() {
6591 GenericBinaryOpStub stub(
6592 op_,
6593 overwrite_mode_,
6594 NO_SMI_CODE_IN_STUB);
6595 stub.GenerateCall(masm_, value_, src_);
6596 if (!dst_.is(rax)) __ movq(dst_, rax);
6597}
6598
6599
6600Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
Leon Clarked91b9f72010-01-27 17:25:45 +00006601 Result* operand,
6602 Handle<Object> value,
Leon Clarked91b9f72010-01-27 17:25:45 +00006603 bool reversed,
6604 OverwriteMode overwrite_mode) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006605 // Generate inline code for a binary operation when one of the
6606 // operands is a constant smi. Consumes the argument "operand".
Steve Blocka7e24c12009-10-30 11:49:00 +00006607 if (IsUnsafeSmi(value)) {
6608 Result unsafe_operand(value);
6609 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01006610 return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
Steve Blocka7e24c12009-10-30 11:49:00 +00006611 overwrite_mode);
6612 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01006613 return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
Steve Blocka7e24c12009-10-30 11:49:00 +00006614 overwrite_mode);
6615 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006616 }
6617
6618 // Get the literal value.
6619 Smi* smi_value = Smi::cast(*value);
6620 int int_value = smi_value->value();
6621
Steve Block6ded16b2010-05-10 14:33:55 +01006622 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00006623 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006624 switch (op) {
6625 case Token::ADD: {
6626 operand->ToRegister();
6627 frame_->Spill(operand->reg());
6628 DeferredCode* deferred = NULL;
6629 if (reversed) {
6630 deferred = new DeferredInlineSmiAddReversed(operand->reg(),
6631 smi_value,
6632 overwrite_mode);
6633 } else {
6634 deferred = new DeferredInlineSmiAdd(operand->reg(),
6635 smi_value,
6636 overwrite_mode);
6637 }
6638 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6639 __ SmiAddConstant(operand->reg(),
6640 operand->reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00006641 smi_value,
Steve Blocka7e24c12009-10-30 11:49:00 +00006642 deferred->entry_label());
6643 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006644 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006645 break;
6646 }
6647
6648 case Token::SUB: {
6649 if (reversed) {
6650 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01006651 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006652 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006653 } else {
6654 operand->ToRegister();
6655 frame_->Spill(operand->reg());
6656 DeferredCode* deferred = new DeferredInlineSmiSub(operand->reg(),
6657 smi_value,
6658 overwrite_mode);
6659 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6660 // A smi currently fits in a 32-bit Immediate.
6661 __ SmiSubConstant(operand->reg(),
6662 operand->reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00006663 smi_value,
Steve Blocka7e24c12009-10-30 11:49:00 +00006664 deferred->entry_label());
6665 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006666 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006667 }
6668 break;
6669 }
6670
6671 case Token::SAR:
6672 if (reversed) {
6673 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01006674 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006675 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006676 } else {
6677 // Only the least significant 5 bits of the shift value are used.
6678 // In the slow case, this masking is done inside the runtime call.
6679 int shift_value = int_value & 0x1f;
6680 operand->ToRegister();
6681 frame_->Spill(operand->reg());
6682 DeferredInlineSmiOperation* deferred =
6683 new DeferredInlineSmiOperation(op,
6684 operand->reg(),
6685 operand->reg(),
6686 smi_value,
6687 overwrite_mode);
6688 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6689 __ SmiShiftArithmeticRightConstant(operand->reg(),
6690 operand->reg(),
6691 shift_value);
6692 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006693 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006694 }
6695 break;
6696
6697 case Token::SHR:
6698 if (reversed) {
6699 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01006700 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006701 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006702 } else {
6703 // Only the least significant 5 bits of the shift value are used.
6704 // In the slow case, this masking is done inside the runtime call.
6705 int shift_value = int_value & 0x1f;
6706 operand->ToRegister();
Leon Clarked91b9f72010-01-27 17:25:45 +00006707 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00006708 ASSERT(answer.is_valid());
6709 DeferredInlineSmiOperation* deferred =
6710 new DeferredInlineSmiOperation(op,
6711 answer.reg(),
6712 operand->reg(),
6713 smi_value,
6714 overwrite_mode);
6715 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6716 __ SmiShiftLogicalRightConstant(answer.reg(),
Steve Block3ce2e202009-11-05 08:53:23 +00006717 operand->reg(),
6718 shift_value,
6719 deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00006720 deferred->BindExit();
6721 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00006722 }
6723 break;
6724
6725 case Token::SHL:
6726 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01006727 operand->ToRegister();
Steve Block6ded16b2010-05-10 14:33:55 +01006728
Kristian Monsen25f61362010-05-21 11:50:48 +01006729 // We need rcx to be available to hold operand, and to be spilled.
6730 // SmiShiftLeft implicitly modifies rcx.
6731 if (operand->reg().is(rcx)) {
6732 frame_->Spill(operand->reg());
6733 answer = allocator()->Allocate();
6734 } else {
6735 Result rcx_reg = allocator()->Allocate(rcx);
6736 // answer must not be rcx.
6737 answer = allocator()->Allocate();
6738 // rcx_reg goes out of scope.
6739 }
6740
Steve Block6ded16b2010-05-10 14:33:55 +01006741 DeferredInlineSmiOperationReversed* deferred =
6742 new DeferredInlineSmiOperationReversed(op,
6743 answer.reg(),
6744 smi_value,
Kristian Monsen25f61362010-05-21 11:50:48 +01006745 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01006746 overwrite_mode);
Kristian Monsen25f61362010-05-21 11:50:48 +01006747 if (!operand->type_info().IsSmi()) {
6748 Condition is_smi = masm_->CheckSmi(operand->reg());
Steve Block6ded16b2010-05-10 14:33:55 +01006749 deferred->Branch(NegateCondition(is_smi));
6750 } else if (FLAG_debug_code) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006751 __ AbortIfNotSmi(operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01006752 "Static type info claims non-smi is smi in (const SHL smi).");
6753 }
Kristian Monsen25f61362010-05-21 11:50:48 +01006754
6755 __ Move(answer.reg(), smi_value);
6756 __ SmiShiftLeft(answer.reg(), answer.reg(), operand->reg());
6757 operand->Unuse();
Steve Block6ded16b2010-05-10 14:33:55 +01006758
6759 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00006760 } else {
6761 // Only the least significant 5 bits of the shift value are used.
6762 // In the slow case, this masking is done inside the runtime call.
6763 int shift_value = int_value & 0x1f;
6764 operand->ToRegister();
6765 if (shift_value == 0) {
6766 // Spill operand so it can be overwritten in the slow case.
6767 frame_->Spill(operand->reg());
6768 DeferredInlineSmiOperation* deferred =
6769 new DeferredInlineSmiOperation(op,
6770 operand->reg(),
6771 operand->reg(),
6772 smi_value,
6773 overwrite_mode);
6774 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
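          // A zero shift leaves a smi unchanged, so only the smi check is needed.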
6775 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006776 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006777 } else {
6778 // Use a fresh temporary for nonzero shift values.
Leon Clarked91b9f72010-01-27 17:25:45 +00006779 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00006780 ASSERT(answer.is_valid());
6781 DeferredInlineSmiOperation* deferred =
6782 new DeferredInlineSmiOperation(op,
6783 answer.reg(),
6784 operand->reg(),
6785 smi_value,
6786 overwrite_mode);
6787 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6788 __ SmiShiftLeftConstant(answer.reg(),
6789 operand->reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01006790 shift_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006791 deferred->BindExit();
6792 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00006793 }
6794 }
6795 break;
6796
6797 case Token::BIT_OR:
6798 case Token::BIT_XOR:
6799 case Token::BIT_AND: {
6800 operand->ToRegister();
6801 frame_->Spill(operand->reg());
6802 if (reversed) {
6803 // Bit operations with a constant smi are commutative.
6804 // We can swap left and right operands with no problem.
6805 // Swap left and right overwrite modes. 0->0, 1->2, 2->1.
6806 overwrite_mode = static_cast<OverwriteMode>((2 * overwrite_mode) % 3);
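        // (2 * mode) % 3 maps 0 -> 0, 1 -> 2 and 2 -> 1, i.e. it leaves
        // NO_OVERWRITE alone and exchanges OVERWRITE_LEFT with OVERWRITE_RIGHT,
        // assuming the usual enum order NO_OVERWRITE, OVERWRITE_LEFT,
        // OVERWRITE_RIGHT.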
6807 }
6808 DeferredCode* deferred = new DeferredInlineSmiOperation(op,
6809 operand->reg(),
6810 operand->reg(),
6811 smi_value,
6812 overwrite_mode);
6813 __ JumpIfNotSmi(operand->reg(), deferred->entry_label());
6814 if (op == Token::BIT_AND) {
Steve Block3ce2e202009-11-05 08:53:23 +00006815 __ SmiAndConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006816 } else if (op == Token::BIT_XOR) {
6817 if (int_value != 0) {
Steve Block3ce2e202009-11-05 08:53:23 +00006818 __ SmiXorConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006819 }
6820 } else {
6821 ASSERT(op == Token::BIT_OR);
6822 if (int_value != 0) {
Steve Block3ce2e202009-11-05 08:53:23 +00006823 __ SmiOrConstant(operand->reg(), operand->reg(), smi_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006824 }
6825 }
6826 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006827 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006828 break;
6829 }
6830
6831 // Generate inline code for mod of powers of 2 and negative powers of 2.
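    // For a non-negative smi x and a power of two m, x % m == x & (m - 1);
    // the result of % takes the sign of the dividend, so a negative right
    // operand yields the same result as its absolute value.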
6832 case Token::MOD:
6833 if (!reversed &&
6834 int_value != 0 &&
6835 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
6836 operand->ToRegister();
6837 frame_->Spill(operand->reg());
Steve Block3ce2e202009-11-05 08:53:23 +00006838 DeferredCode* deferred =
6839 new DeferredInlineSmiOperation(op,
6840 operand->reg(),
6841 operand->reg(),
6842 smi_value,
6843 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006844 // Check for negative or non-Smi left hand side.
6845 __ JumpIfNotPositiveSmi(operand->reg(), deferred->entry_label());
6846 if (int_value < 0) int_value = -int_value;
6847 if (int_value == 1) {
Steve Block3ce2e202009-11-05 08:53:23 +00006848 __ Move(operand->reg(), Smi::FromInt(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00006849 } else {
Steve Block3ce2e202009-11-05 08:53:23 +00006850 __ SmiAndConstant(operand->reg(),
6851 operand->reg(),
6852 Smi::FromInt(int_value - 1));
Steve Blocka7e24c12009-10-30 11:49:00 +00006853 }
6854 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00006855 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00006856 break; // This break only applies if we generated code for MOD.
6857 }
6858 // Fall through if we did not find a power of 2 on the right hand side!
6859 // The next case must be the default.
6860
6861 default: {
6862 Result constant_operand(value);
6863 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01006864 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006865 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006866 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01006867 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00006868 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00006869 }
6870 break;
6871 }
6872 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006873 ASSERT(answer.is_valid());
6874 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006875}
6876
Kristian Monsen25f61362010-05-21 11:50:48 +01006877
6878// Implements a binary operation using a deferred code object and some
6879// inline code to operate on smis quickly.
Steve Block6ded16b2010-05-10 14:33:55 +01006880Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
Leon Clarked91b9f72010-01-27 17:25:45 +00006881 Result* left,
6882 Result* right,
6883 OverwriteMode overwrite_mode) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006884 // Copy the type info because left and right may be overwritten.
6885 TypeInfo left_type_info = left->type_info();
6886 TypeInfo right_type_info = right->type_info();
6887 USE(left_type_info);
6888 USE(right_type_info);
6889 // TODO(X64): Use type information in calculations.
Steve Block6ded16b2010-05-10 14:33:55 +01006890 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00006891 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006892 // Special handling of div and mod because they use fixed registers.
6893 if (op == Token::DIV || op == Token::MOD) {
6894 // We need rax as the quotient register, rdx as the remainder
6895 // register, neither left nor right in rax or rdx, and left copied
6896 // to rax.
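    // These constraints mirror the x64 idiv instruction, which produces its
    // quotient in rax and its remainder in rdx.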
6897 Result quotient;
6898 Result remainder;
6899 bool left_is_in_rax = false;
6900 // Step 1: get rax for quotient.
6901 if ((left->is_register() && left->reg().is(rax)) ||
6902 (right->is_register() && right->reg().is(rax))) {
6903 // One or both is in rax. Use a fresh non-rdx register for
6904 // them.
6905 Result fresh = allocator_->Allocate();
6906 ASSERT(fresh.is_valid());
6907 if (fresh.reg().is(rdx)) {
6908 remainder = fresh;
6909 fresh = allocator_->Allocate();
6910 ASSERT(fresh.is_valid());
6911 }
6912 if (left->is_register() && left->reg().is(rax)) {
6913 quotient = *left;
6914 *left = fresh;
6915 left_is_in_rax = true;
6916 }
6917 if (right->is_register() && right->reg().is(rax)) {
6918 quotient = *right;
6919 *right = fresh;
6920 }
6921 __ movq(fresh.reg(), rax);
6922 } else {
6923 // Neither left nor right is in rax.
6924 quotient = allocator_->Allocate(rax);
6925 }
6926 ASSERT(quotient.is_register() && quotient.reg().is(rax));
6927 ASSERT(!(left->is_register() && left->reg().is(rax)));
6928 ASSERT(!(right->is_register() && right->reg().is(rax)));
6929
6930 // Step 2: get rdx for remainder if necessary.
6931 if (!remainder.is_valid()) {
6932 if ((left->is_register() && left->reg().is(rdx)) ||
6933 (right->is_register() && right->reg().is(rdx))) {
6934 Result fresh = allocator_->Allocate();
6935 ASSERT(fresh.is_valid());
6936 if (left->is_register() && left->reg().is(rdx)) {
6937 remainder = *left;
6938 *left = fresh;
6939 }
6940 if (right->is_register() && right->reg().is(rdx)) {
6941 remainder = *right;
6942 *right = fresh;
6943 }
6944 __ movq(fresh.reg(), rdx);
6945 } else {
6946 // Neither left nor right is in rdx.
6947 remainder = allocator_->Allocate(rdx);
6948 }
6949 }
6950 ASSERT(remainder.is_register() && remainder.reg().is(rdx));
6951 ASSERT(!(left->is_register() && left->reg().is(rdx)));
6952 ASSERT(!(right->is_register() && right->reg().is(rdx)));
6953
6954 left->ToRegister();
6955 right->ToRegister();
6956 frame_->Spill(rax);
6957 frame_->Spill(rdx);
6958
6959 // Check that left and right are smi tagged.
6960 DeferredInlineBinaryOperation* deferred =
6961 new DeferredInlineBinaryOperation(op,
6962 (op == Token::DIV) ? rax : rdx,
6963 left->reg(),
6964 right->reg(),
6965 overwrite_mode);
6966 __ JumpIfNotBothSmi(left->reg(), right->reg(), deferred->entry_label());
6967
6968 if (op == Token::DIV) {
6969 __ SmiDiv(rax, left->reg(), right->reg(), deferred->entry_label());
6970 deferred->BindExit();
6971 left->Unuse();
6972 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00006973 answer = quotient;
Steve Blocka7e24c12009-10-30 11:49:00 +00006974 } else {
6975 ASSERT(op == Token::MOD);
6976 __ SmiMod(rdx, left->reg(), right->reg(), deferred->entry_label());
6977 deferred->BindExit();
6978 left->Unuse();
6979 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00006980 answer = remainder;
Steve Blocka7e24c12009-10-30 11:49:00 +00006981 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006982 ASSERT(answer.is_valid());
6983 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00006984 }
6985
6986 // Special handling of shift operations because they use fixed
6987 // registers.
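  // Variable shift instructions on x64 take their shift count in cl, so the
  // shift count (the right operand) is forced into rcx below.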
6988 if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
6989 // Move left out of rcx if necessary.
6990 if (left->is_register() && left->reg().is(rcx)) {
6991 *left = allocator_->Allocate();
6992 ASSERT(left->is_valid());
6993 __ movq(left->reg(), rcx);
6994 }
6995 right->ToRegister(rcx);
6996 left->ToRegister();
6997 ASSERT(left->is_register() && !left->reg().is(rcx));
6998 ASSERT(right->is_register() && right->reg().is(rcx));
6999
7000 // We will modify right, it must be spilled.
7001 frame_->Spill(rcx);
7002
7003 // Use a fresh answer register to avoid spilling the left operand.
Leon Clarked91b9f72010-01-27 17:25:45 +00007004 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00007005 ASSERT(answer.is_valid());
7006 // Check that both operands are smis using the answer register as a
7007 // temporary.
7008 DeferredInlineBinaryOperation* deferred =
7009 new DeferredInlineBinaryOperation(op,
7010 answer.reg(),
7011 left->reg(),
7012 rcx,
7013 overwrite_mode);
Kristian Monsen25f61362010-05-21 11:50:48 +01007014 __ JumpIfNotBothSmi(left->reg(), rcx, deferred->entry_label());
Steve Blocka7e24c12009-10-30 11:49:00 +00007015
7016 // Perform the operation.
7017 switch (op) {
7018 case Token::SAR:
7019 __ SmiShiftArithmeticRight(answer.reg(), left->reg(), rcx);
7020 break;
7021 case Token::SHR: {
7022 __ SmiShiftLogicalRight(answer.reg(),
7023 left->reg(),
7024 rcx,
7025 deferred->entry_label());
7026 break;
7027 }
7028 case Token::SHL: {
7029 __ SmiShiftLeft(answer.reg(),
7030 left->reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01007031 rcx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007032 break;
7033 }
7034 default:
7035 UNREACHABLE();
7036 }
7037 deferred->BindExit();
7038 left->Unuse();
7039 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00007040 ASSERT(answer.is_valid());
7041 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00007042 }
7043
7044 // Handle the other binary operations.
7045 left->ToRegister();
7046 right->ToRegister();
7047 // A newly allocated register answer is used to hold the answer. The
7048 // registers containing left and right are not modified so they don't
7049 // need to be spilled in the fast case.
Leon Clarked91b9f72010-01-27 17:25:45 +00007050 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00007051 ASSERT(answer.is_valid());
7052
7053 // Perform the smi tag check.
7054 DeferredInlineBinaryOperation* deferred =
7055 new DeferredInlineBinaryOperation(op,
7056 answer.reg(),
7057 left->reg(),
7058 right->reg(),
7059 overwrite_mode);
7060 __ JumpIfNotBothSmi(left->reg(), right->reg(), deferred->entry_label());
7061
7062 switch (op) {
7063 case Token::ADD:
7064 __ SmiAdd(answer.reg(),
7065 left->reg(),
7066 right->reg(),
7067 deferred->entry_label());
7068 break;
7069
7070 case Token::SUB:
7071 __ SmiSub(answer.reg(),
7072 left->reg(),
7073 right->reg(),
7074 deferred->entry_label());
7075 break;
7076
7077 case Token::MUL: {
7078 __ SmiMul(answer.reg(),
7079 left->reg(),
7080 right->reg(),
7081 deferred->entry_label());
7082 break;
7083 }
7084
7085 case Token::BIT_OR:
7086 __ SmiOr(answer.reg(), left->reg(), right->reg());
7087 break;
7088
7089 case Token::BIT_AND:
7090 __ SmiAnd(answer.reg(), left->reg(), right->reg());
7091 break;
7092
7093 case Token::BIT_XOR:
7094 __ SmiXor(answer.reg(), left->reg(), right->reg());
7095 break;
7096
7097 default:
7098 UNREACHABLE();
7099 break;
7100 }
7101 deferred->BindExit();
7102 left->Unuse();
7103 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00007104 ASSERT(answer.is_valid());
7105 return answer;
7106}
7107
7108
7109Result CodeGenerator::EmitKeyedLoad(bool is_global) {
7110 Comment cmnt(masm_, "[ Load from keyed Property");
7111 // Inline array load code if inside of a loop. We do not know
7112 // the receiver map yet, so we initially generate the code with
7113 // a check against an invalid map. In the inline cache code, we
7114 // patch the map check if appropriate.
7115 if (loop_nesting() > 0) {
7116 Comment cmnt(masm_, "[ Inlined load from keyed Property");
7117
7118 Result key = frame_->Pop();
7119 Result receiver = frame_->Pop();
7120 key.ToRegister();
7121 receiver.ToRegister();
7122
7123 // Use a fresh temporary to load the elements without destroying
7124 // the receiver which is needed for the deferred slow case.
7125 Result elements = allocator()->Allocate();
7126 ASSERT(elements.is_valid());
7127
7128 // Use a fresh temporary for the index and later the loaded
7129 // value.
7130 Result index = allocator()->Allocate();
7131 ASSERT(index.is_valid());
7132
7133 DeferredReferenceGetKeyedValue* deferred =
7134 new DeferredReferenceGetKeyedValue(index.reg(),
7135 receiver.reg(),
7136 key.reg(),
7137 is_global);
7138
7139 // Check that the receiver is not a smi (only needed if this
7140 // is not a load from the global context) and that it has the
7141 // expected map.
7142 if (!is_global) {
7143 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
7144 }
7145
7146 // Initially, use an invalid map. The map is patched in the IC
7147 // initialization code.
7148 __ bind(deferred->patch_site());
7149 // Use masm-> here instead of the double underscore macro since extra
7150 // coverage code can interfere with the patching. Do not use
7151 // root array to load null_value, since it must be patched with
7152 // the expected receiver map.
7153 masm_->movq(kScratchRegister, Factory::null_value(),
7154 RelocInfo::EMBEDDED_OBJECT);
7155 masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
7156 kScratchRegister);
7157 deferred->Branch(not_equal);
7158
7159 // Check that the key is a non-negative smi.
7160 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label());
7161
7162 // Get the elements array from the receiver and check that it
7163 // is not a dictionary.
7164 __ movq(elements.reg(),
7165 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
7166 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
7167 Factory::fixed_array_map());
7168 deferred->Branch(not_equal);
7169
7170 // Shift the key to get the actual index value and check that
7171 // it is within bounds.
7172 __ SmiToInteger32(index.reg(), key.reg());
7173 __ cmpl(index.reg(),
7174 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
7175 deferred->Branch(above_equal);
7176
7177 // The index register holds the un-smi-tagged key. It has been
7178    // zero-extended to 64 bits, so it can be used directly as an index in the
7179    // operand below.
7180 // Load and check that the result is not the hole. We could
7181 // reuse the index or elements register for the value.
7182 //
7183 // TODO(206): Consider whether it makes sense to try some
7184 // heuristic about which register to reuse. For example, if
7185    // one is rax, then we can reuse that one because the value
7186 // coming from the deferred code will be in rax.
7187 Result value = index;
7188 __ movq(value.reg(),
7189 Operand(elements.reg(),
7190 index.reg(),
7191 times_pointer_size,
7192 FixedArray::kHeaderSize - kHeapObjectTag));
7193 elements.Unuse();
7194 index.Unuse();
7195 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
7196 deferred->Branch(equal);
7197 __ IncrementCounter(&Counters::keyed_load_inline, 1);
7198
7199 deferred->BindExit();
7200 // Restore the receiver and key to the frame and push the
7201 // result on top of it.
7202 frame_->Push(&receiver);
7203 frame_->Push(&key);
7204 return value;
7205
7206 } else {
7207 Comment cmnt(masm_, "[ Load from keyed Property");
7208 RelocInfo::Mode mode = is_global
7209 ? RelocInfo::CODE_TARGET_CONTEXT
7210 : RelocInfo::CODE_TARGET;
7211 Result answer = frame_->CallKeyedLoadIC(mode);
7212 // Make sure that we do not have a test instruction after the
7213 // call. A test instruction after the call is used to
7214 // indicate that we have generated an inline version of the
7215 // keyed load. The explicit nop instruction is here because
7216 // the push that follows might be peep-hole optimized away.
7217 __ nop();
7218 return answer;
7219 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007220}
7221
7222
7223#undef __
7224#define __ ACCESS_MASM(masm)
7225
7226
7227Handle<String> Reference::GetName() {
7228 ASSERT(type_ == NAMED);
7229 Property* property = expression_->AsProperty();
7230 if (property == NULL) {
7231 // Global variable reference treated as a named property reference.
7232 VariableProxy* proxy = expression_->AsVariableProxy();
7233 ASSERT(proxy->AsVariable() != NULL);
7234 ASSERT(proxy->AsVariable()->is_global());
7235 return proxy->name();
7236 } else {
7237 Literal* raw_name = property->key()->AsLiteral();
7238 ASSERT(raw_name != NULL);
7239 return Handle<String>(String::cast(*raw_name->handle()));
7240 }
7241}
7242
7243
Steve Blockd0582a62009-12-15 09:54:21 +00007244void Reference::GetValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00007245 ASSERT(!cgen_->in_spilled_code());
7246 ASSERT(cgen_->HasValidEntryRegisters());
7247 ASSERT(!is_illegal());
7248 MacroAssembler* masm = cgen_->masm();
7249
7250 // Record the source position for the property load.
7251 Property* property = expression_->AsProperty();
7252 if (property != NULL) {
7253 cgen_->CodeForSourcePosition(property->position());
7254 }
7255
7256 switch (type_) {
7257 case SLOT: {
7258 Comment cmnt(masm, "[ Load from Slot");
7259 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
7260 ASSERT(slot != NULL);
Steve Blockd0582a62009-12-15 09:54:21 +00007261 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00007262 break;
7263 }
7264
7265 case NAMED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00007266 Variable* var = expression_->AsVariableProxy()->AsVariable();
7267 bool is_global = var != NULL;
7268 ASSERT(!is_global || var->is_global());
7269
7270 // Do not inline the inobject property case for loads from the global
7271 // object. Also do not inline for unoptimized code. This saves time
7272 // in the code generator. Unoptimized code is toplevel code or code
7273 // that is not in a loop.
7274 if (is_global ||
7275 cgen_->scope()->is_global_scope() ||
7276 cgen_->loop_nesting() == 0) {
7277 Comment cmnt(masm, "[ Load from named Property");
7278 cgen_->frame()->Push(GetName());
7279
7280 RelocInfo::Mode mode = is_global
7281 ? RelocInfo::CODE_TARGET_CONTEXT
7282 : RelocInfo::CODE_TARGET;
7283 Result answer = cgen_->frame()->CallLoadIC(mode);
7284 // A test rax instruction following the call signals that the
7285 // inobject property case was inlined. Ensure that there is not
7286 // a test rax instruction here.
7287 __ nop();
7288 cgen_->frame()->Push(&answer);
7289 } else {
7290 // Inline the inobject property case.
7291 Comment cmnt(masm, "[ Inlined named property load");
7292 Result receiver = cgen_->frame()->Pop();
7293 receiver.ToRegister();
7294 Result value = cgen_->allocator()->Allocate();
7295 ASSERT(value.is_valid());
7296 // Cannot use r12 for receiver, because that changes
7297 // the distance between a call and a fixup location,
7298 // due to a special encoding of r12 as r/m in a ModR/M byte.
7299 if (receiver.reg().is(r12)) {
7300 // Swap receiver and value.
7301 __ movq(value.reg(), receiver.reg());
7302 Result temp = receiver;
7303 receiver = value;
7304 value = temp;
7305 cgen_->frame()->Spill(value.reg()); // r12 may have been shared.
7306 }
7307
7308 DeferredReferenceGetNamedValue* deferred =
7309 new DeferredReferenceGetNamedValue(value.reg(),
7310 receiver.reg(),
7311 GetName());
7312
7313 // Check that the receiver is a heap object.
7314 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
7315
7316 __ bind(deferred->patch_site());
7317 // This is the map check instruction that will be patched (so we can't
7318 // use the double underscore macro that may insert instructions).
7319 // Initially use an invalid map to force a failure.
7320 masm->Move(kScratchRegister, Factory::null_value());
7321 masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
7322 kScratchRegister);
7323 // This branch is always a forwards branch so it's always a fixed
7324 // size which allows the assert below to succeed and patching to work.
7325 // Don't use deferred->Branch(...), since that might add coverage code.
7326 masm->j(not_equal, deferred->entry_label());
7327
7328 // The delta from the patch label to the load offset must be
7329 // statically known.
7330 ASSERT(masm->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
7331 LoadIC::kOffsetToLoadInstruction);
7332 // The initial (invalid) offset has to be large enough to force
7333 // a 32-bit instruction encoding to allow patching with an
7334 // arbitrary offset. Use kMaxInt (minus kHeapObjectTag).
7335 int offset = kMaxInt;
7336 masm->movq(value.reg(), FieldOperand(receiver.reg(), offset));
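        // (Hedged note: when the inline cache later records the receiver map,
        // the 32-bit displacement in the movq above is patched to the actual
        // in-object property offset, so subsequent loads take this fast path.)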
7337
7338 __ IncrementCounter(&Counters::named_load_inline, 1);
7339 deferred->BindExit();
7340 cgen_->frame()->Push(&receiver);
7341 cgen_->frame()->Push(&value);
7342 }
7343 break;
7344 }
7345
7346 case KEYED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00007347 Comment cmnt(masm, "[ Load from keyed Property");
7348 Variable* var = expression_->AsVariableProxy()->AsVariable();
7349 bool is_global = var != NULL;
7350 ASSERT(!is_global || var->is_global());
7351
Leon Clarked91b9f72010-01-27 17:25:45 +00007352 Result value = cgen_->EmitKeyedLoad(is_global);
7353 cgen_->frame()->Push(&value);
Steve Blocka7e24c12009-10-30 11:49:00 +00007354 break;
7355 }
7356
7357 default:
7358 UNREACHABLE();
7359 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007360
7361 if (!persist_after_get_) {
7362 cgen_->UnloadReference(this);
7363 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007364}
7365
7366
Steve Blockd0582a62009-12-15 09:54:21 +00007367void Reference::TakeValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00007368 // TODO(X64): This function is completely architecture independent. Move
7369 // it somewhere shared.
7370
7371 // For non-constant frame-allocated slots, we invalidate the value in the
7372 // slot. For all others, we fall back on GetValue.
7373 ASSERT(!cgen_->in_spilled_code());
7374 ASSERT(!is_illegal());
7375 if (type_ != SLOT) {
Steve Blockd0582a62009-12-15 09:54:21 +00007376 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00007377 return;
7378 }
7379
7380 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
7381 ASSERT(slot != NULL);
7382 if (slot->type() == Slot::LOOKUP ||
7383 slot->type() == Slot::CONTEXT ||
7384 slot->var()->mode() == Variable::CONST ||
7385 slot->is_arguments()) {
Steve Blockd0582a62009-12-15 09:54:21 +00007386 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00007387 return;
7388 }
7389
7390 // Only non-constant, frame-allocated parameters and locals can reach
7391 // here. Be careful not to use the optimizations for arguments
7392 // object access since it may not have been initialized yet.
7393 ASSERT(!slot->is_arguments());
7394 if (slot->type() == Slot::PARAMETER) {
7395 cgen_->frame()->TakeParameterAt(slot->index());
7396 } else {
7397 ASSERT(slot->type() == Slot::LOCAL);
7398 cgen_->frame()->TakeLocalAt(slot->index());
7399 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007400
7401 ASSERT(persist_after_get_);
7402 // Do not unload the reference, because it is used in SetValue.
Steve Blocka7e24c12009-10-30 11:49:00 +00007403}
7404
7405
7406void Reference::SetValue(InitState init_state) {
7407 ASSERT(cgen_->HasValidEntryRegisters());
7408 ASSERT(!is_illegal());
7409 MacroAssembler* masm = cgen_->masm();
7410 switch (type_) {
7411 case SLOT: {
7412 Comment cmnt(masm, "[ Store to Slot");
7413 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
7414 ASSERT(slot != NULL);
7415 cgen_->StoreToSlot(slot, init_state);
Leon Clarke4515c472010-02-03 11:58:03 +00007416 cgen_->UnloadReference(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00007417 break;
7418 }
7419
7420 case NAMED: {
7421 Comment cmnt(masm, "[ Store to named Property");
7422 cgen_->frame()->Push(GetName());
7423 Result answer = cgen_->frame()->CallStoreIC();
7424 cgen_->frame()->Push(&answer);
Leon Clarke4515c472010-02-03 11:58:03 +00007425 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007426 break;
7427 }
7428
7429 case KEYED: {
7430 Comment cmnt(masm, "[ Store to keyed Property");
7431
7432 // Generate inlined version of the keyed store if the code is in
7433 // a loop and the key is likely to be a smi.
7434 Property* property = expression()->AsProperty();
7435 ASSERT(property != NULL);
Leon Clarkee46be812010-01-19 14:06:41 +00007436 StaticType* key_smi_analysis = property->key()->type();
Steve Blocka7e24c12009-10-30 11:49:00 +00007437
7438 if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
7439 Comment cmnt(masm, "[ Inlined store to keyed Property");
7440
7441 // Get the receiver, key and value into registers.
7442 Result value = cgen_->frame()->Pop();
7443 Result key = cgen_->frame()->Pop();
7444 Result receiver = cgen_->frame()->Pop();
7445
7446 Result tmp = cgen_->allocator_->Allocate();
7447 ASSERT(tmp.is_valid());
Steve Block6ded16b2010-05-10 14:33:55 +01007448 Result tmp2 = cgen_->allocator_->Allocate();
7449 ASSERT(tmp2.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00007450
7451 // Determine whether the value is a constant before putting it
7452 // in a register.
7453 bool value_is_constant = value.is_constant();
7454
7455 // Make sure that value, key and receiver are in registers.
7456 value.ToRegister();
7457 key.ToRegister();
7458 receiver.ToRegister();
7459
7460 DeferredReferenceSetKeyedValue* deferred =
7461 new DeferredReferenceSetKeyedValue(value.reg(),
7462 key.reg(),
7463 receiver.reg());
7464
Steve Blocka7e24c12009-10-30 11:49:00 +00007465 // Check that the receiver is not a smi.
7466 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
7467
Steve Block6ded16b2010-05-10 14:33:55 +01007468 // Check that the key is a smi.
7469 if (!key.is_smi()) {
7470 __ JumpIfNotSmi(key.reg(), deferred->entry_label());
7471 } else if (FLAG_debug_code) {
7472 __ AbortIfNotSmi(key.reg(), "Non-smi value in smi-typed value.");
7473 }
7474
Steve Blocka7e24c12009-10-30 11:49:00 +00007475 // Check that the receiver is a JSArray.
7476 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
7477 deferred->Branch(not_equal);
7478
7479 // Check that the key is within bounds. Both the key and the
Steve Block6ded16b2010-05-10 14:33:55 +01007480 // length of the JSArray are smis. Use unsigned comparison to handle
7481 // negative keys.
Steve Block3ce2e202009-11-05 08:53:23 +00007482 __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
7483 key.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01007484 deferred->Branch(below_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00007485
7486 // Get the elements array from the receiver and check that it
7487 // is a flat array (not a dictionary).
7488 __ movq(tmp.reg(),
7489 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007490
7491 // Check whether it is possible to omit the write barrier. If the
7492 // elements array is in new space or the value written is a smi we can
7493 // safely update the elements array without updating the remembered set.
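        // (Illustrative note: smis are not heap pointers, and stores into
        // new-space objects are found by scanning new space directly, so in
        // either case no remembered-set entry is required.)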
7494 Label in_new_space;
7495 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
7496 if (!value_is_constant) {
7497 __ JumpIfNotSmi(value.reg(), deferred->entry_label());
7498 }
7499
7500 __ bind(&in_new_space);
Steve Blocka7e24c12009-10-30 11:49:00 +00007501 // Bind the deferred code patch site to be able to locate the
7502 // fixed array map comparison. When debugging, we patch this
7503 // comparison to always fail so that we will hit the IC call
7504 // in the deferred code which will allow the debugger to
7505 // break for fast case stores.
7506 __ bind(deferred->patch_site());
7507 // Avoid using __ to ensure the distance from patch_site
7508 // to the map address is always the same.
7509 masm->movq(kScratchRegister, Factory::fixed_array_map(),
7510 RelocInfo::EMBEDDED_OBJECT);
7511 __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
7512 kScratchRegister);
7513 deferred->Branch(not_equal);
7514
7515 // Store the value.
7516 SmiIndex index =
7517 masm->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
7518 __ movq(Operand(tmp.reg(),
7519 index.reg,
7520 index.scale,
7521 FixedArray::kHeaderSize - kHeapObjectTag),
7522 value.reg());
7523 __ IncrementCounter(&Counters::keyed_store_inline, 1);
7524
7525 deferred->BindExit();
7526
7527 cgen_->frame()->Push(&receiver);
7528 cgen_->frame()->Push(&key);
7529 cgen_->frame()->Push(&value);
7530 } else {
7531 Result answer = cgen_->frame()->CallKeyedStoreIC();
7532 // Make sure that we do not have a test instruction after the
7533 // call. A test instruction after the call is used to
7534 // indicate that we have generated an inline version of the
7535 // keyed store.
7536 masm->nop();
7537 cgen_->frame()->Push(&answer);
7538 }
Leon Clarke4515c472010-02-03 11:58:03 +00007539 cgen_->UnloadReference(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00007540 break;
7541 }
7542
7543 default:
7544 UNREACHABLE();
7545 }
7546}
7547
7548
Leon Clarkee46be812010-01-19 14:06:41 +00007549void FastNewClosureStub::Generate(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +01007550 // Create a new closure from the given function info in new
7551 // space. Set the context to the current context in rsi.
Leon Clarkee46be812010-01-19 14:06:41 +00007552 Label gc;
7553 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
7554
Steve Block6ded16b2010-05-10 14:33:55 +01007555 // Get the function info from the stack.
Leon Clarkee46be812010-01-19 14:06:41 +00007556 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
7557
7558 // Compute the function map in the current global context and set that
7559 // as the map of the allocated object.
7560 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
7561 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
7562 __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
7563 __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
7564
Steve Block6ded16b2010-05-10 14:33:55 +01007565 // Initialize the rest of the function. We don't have to update the
7566 // write barrier because the allocated object is in new space.
7567 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
7568 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex);
7569 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
7570 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx);
7571 __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx);
7572 __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx);
7573 __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi);
7574 __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx);
Leon Clarkee46be812010-01-19 14:06:41 +00007575
7576 // Return and remove the on-stack parameter.
7577 __ ret(1 * kPointerSize);
7578
7579 // Create a new closure through the slower runtime call.
7580 __ bind(&gc);
7581 __ pop(rcx); // Temporarily remove return address.
7582 __ pop(rdx);
7583 __ push(rsi);
7584 __ push(rdx);
7585 __ push(rcx); // Restore return address.
Steve Block6ded16b2010-05-10 14:33:55 +01007586 __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00007587}
7588
7589
7590void FastNewContextStub::Generate(MacroAssembler* masm) {
7591 // Try to allocate the context in new space.
7592 Label gc;
7593 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
7594 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
7595 rax, rbx, rcx, &gc, TAG_OBJECT);
7596
7597 // Get the function from the stack.
7598 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
7599
7600 // Setup the object header.
7601 __ LoadRoot(kScratchRegister, Heap::kContextMapRootIndex);
7602 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
7603 __ movl(FieldOperand(rax, Array::kLengthOffset), Immediate(length));
7604
7605 // Setup the fixed slots.
7606 __ xor_(rbx, rbx); // Set to NULL.
7607 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
7608 __ movq(Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX)), rax);
7609 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rbx);
7610 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);
7611
7612 // Copy the global object from the surrounding context.
7613 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
7614 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
7615
7616 // Initialize the rest of the slots to undefined.
7617 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
7618 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
7619 __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
7620 }
7621
7622 // Return and remove the on-stack parameter.
7623 __ movq(rsi, rax);
7624 __ ret(1 * kPointerSize);
7625
7626 // Need to collect. Call into runtime system.
7627 __ bind(&gc);
Steve Block6ded16b2010-05-10 14:33:55 +01007628 __ TailCallRuntime(Runtime::kNewContext, 1, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00007629}
7630
7631
Andrei Popescu402d9372010-02-26 13:31:12 +00007632void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
7633 // Stack layout on entry:
7634 //
7635 // [rsp + kPointerSize]: constant elements.
7636 // [rsp + (2 * kPointerSize)]: literal index.
7637 // [rsp + (3 * kPointerSize)]: literals array.
7638
7639 // All sizes here are multiples of kPointerSize.
7640 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
7641 int size = JSArray::kSize + elements_size;
7642
7643 // Load boilerplate object into rcx and check if we need to create a
7644 // boilerplate.
7645 Label slow_case;
7646 __ movq(rcx, Operand(rsp, 3 * kPointerSize));
7647 __ movq(rax, Operand(rsp, 2 * kPointerSize));
7648 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
7649 __ movq(rcx,
7650 FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
7651 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
7652 __ j(equal, &slow_case);
7653
7654 // Allocate both the JS array and the elements array in one big
7655 // allocation. This avoids multiple limit checks.
7656 __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
7657
7658 // Copy the JS array part.
7659 for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
7660 if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
7661 __ movq(rbx, FieldOperand(rcx, i));
7662 __ movq(FieldOperand(rax, i), rbx);
7663 }
7664 }
7665
7666 if (length_ > 0) {
7667 // Get hold of the elements array of the boilerplate and setup the
7668 // elements pointer in the resulting object.
7669 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
7670 __ lea(rdx, Operand(rax, JSArray::kSize));
7671 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
7672
7673 // Copy the elements array.
7674 for (int i = 0; i < elements_size; i += kPointerSize) {
7675 __ movq(rbx, FieldOperand(rcx, i));
7676 __ movq(FieldOperand(rdx, i), rbx);
7677 }
7678 }
7679
7680 // Return and remove the on-stack parameters.
7681 __ ret(3 * kPointerSize);
7682
7683 __ bind(&slow_case);
Steve Block6ded16b2010-05-10 14:33:55 +01007684 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00007685}
7686
7687
Steve Blocka7e24c12009-10-30 11:49:00 +00007688void ToBooleanStub::Generate(MacroAssembler* masm) {
7689 Label false_result, true_result, not_string;
7690 __ movq(rax, Operand(rsp, 1 * kPointerSize));
7691
7692 // 'null' => false.
7693 __ CompareRoot(rax, Heap::kNullValueRootIndex);
7694 __ j(equal, &false_result);
7695
7696 // Get the map and type of the heap object.
7697 // We don't use CmpObjectType because we manipulate the type field.
7698 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
7699 __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
7700
7701 // Undetectable => false.
7702 __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
7703 __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
7704 __ j(not_zero, &false_result);
7705
7706 // JavaScript object => true.
7707 __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
7708 __ j(above_equal, &true_result);
7709
7710 // String value => false iff empty.
7711 __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
7712 __ j(above_equal, &not_string);
Steve Block6ded16b2010-05-10 14:33:55 +01007713 __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
7714 __ SmiTest(rdx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007715 __ j(zero, &false_result);
7716 __ jmp(&true_result);
7717
7718 __ bind(&not_string);
7719 // HeapNumber => false iff +0, -0, or NaN.
7720 // These three cases set C3 when compared to zero in the FPU.
7721 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
7722 __ j(not_equal, &true_result);
Steve Blocka7e24c12009-10-30 11:49:00 +00007723 __ fldz(); // Load zero onto fp stack
7724 // Load heap-number double value onto fp stack
7725 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00007726 __ FCmp();
7727 __ j(zero, &false_result);
Steve Blocka7e24c12009-10-30 11:49:00 +00007728 // Fall through to |true_result|.
7729
7730 // Return 1/0 for true/false in rax.
7731 __ bind(&true_result);
7732 __ movq(rax, Immediate(1));
7733 __ ret(1 * kPointerSize);
7734 __ bind(&false_result);
7735 __ xor_(rax, rax);
7736 __ ret(1 * kPointerSize);
7737}
7738
7739
7740bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007741 Object* answer_object = Heap::undefined_value();
7742 switch (op) {
7743 case Token::ADD:
Leon Clarked91b9f72010-01-27 17:25:45 +00007744 // Use intptr_t to detect overflow of 32-bit int.
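      // (Hedged example, assuming 32-bit smi values: left = right = 0x40000000
      // sums to 0x80000000, which is not a valid smi; a 32-bit int addition
      // would wrap to a "valid" negative smi and fold incorrectly.)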
7745 if (Smi::IsValid(static_cast<intptr_t>(left) + right)) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007746 answer_object = Smi::FromInt(left + right);
7747 }
7748 break;
7749 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00007750 // Use intptr_t to detect overflow of 32-bit int.
7751 if (Smi::IsValid(static_cast<intptr_t>(left) - right)) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007752 answer_object = Smi::FromInt(left - right);
7753 }
7754 break;
7755 case Token::MUL: {
7756 double answer = static_cast<double>(left) * right;
7757 if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
7758 // If the product is zero and the non-zero factor is negative,
7759 // the spec requires us to return floating point negative zero.
7760 if (answer != 0 || (left + right) >= 0) {
7761 answer_object = Smi::FromInt(static_cast<int>(answer));
7762 }
7763 }
7764 }
7765 break;
7766 case Token::DIV:
7767 case Token::MOD:
7768 break;
7769 case Token::BIT_OR:
7770 answer_object = Smi::FromInt(left | right);
7771 break;
7772 case Token::BIT_AND:
7773 answer_object = Smi::FromInt(left & right);
7774 break;
7775 case Token::BIT_XOR:
7776 answer_object = Smi::FromInt(left ^ right);
7777 break;
7778
7779 case Token::SHL: {
7780 int shift_amount = right & 0x1F;
7781 if (Smi::IsValid(left << shift_amount)) {
7782 answer_object = Smi::FromInt(left << shift_amount);
7783 }
7784 break;
7785 }
7786 case Token::SHR: {
7787 int shift_amount = right & 0x1F;
7788 unsigned int unsigned_left = left;
7789 unsigned_left >>= shift_amount;
7790 if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
7791 answer_object = Smi::FromInt(unsigned_left);
7792 }
7793 break;
7794 }
7795 case Token::SAR: {
7796 int shift_amount = right & 0x1F;
7797 unsigned int unsigned_left = left;
7798 if (left < 0) {
7799 // Perform arithmetic shift of a negative number by
7800 // complementing number, logical shifting, complementing again.
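        // (Worked example, for illustration: left = -5, shift_amount = 1 gives
        // ~(-5) = 4, 4 >> 1 = 2, ~2 = -3, which equals -5 >> 1 arithmetically.)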
7801 unsigned_left = ~unsigned_left;
7802 unsigned_left >>= shift_amount;
7803 unsigned_left = ~unsigned_left;
7804 } else {
7805 unsigned_left >>= shift_amount;
7806 }
7807 ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
7808 answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
7809 break;
7810 }
7811 default:
7812 UNREACHABLE();
7813 break;
7814 }
7815 if (answer_object == Heap::undefined_value()) {
7816 return false;
7817 }
7818 frame_->Push(Handle<Object>(answer_object));
7819 return true;
7820}
7821
7822
7823// End of CodeGenerator implementation.
7824
Steve Block6ded16b2010-05-10 14:33:55 +01007825void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
7826 // Input on stack:
7827 // rsp[8]: argument (should be number).
7828 // rsp[0]: return address.
7829 Label runtime_call;
7830 Label runtime_call_clear_stack;
7831 Label input_not_smi;
7832 Label loaded;
7833 // Test that rax is a number.
7834 __ movq(rax, Operand(rsp, kPointerSize));
7835 __ JumpIfNotSmi(rax, &input_not_smi);
7836 // Input is a smi. Untag and load it onto the FPU stack.
7837 // Then load the bits of the double into rbx.
7838 __ SmiToInteger32(rax, rax);
7839 __ subq(rsp, Immediate(kPointerSize));
7840 __ cvtlsi2sd(xmm1, rax);
7841 __ movsd(Operand(rsp, 0), xmm1);
7842 __ movq(rbx, xmm1);
7843 __ movq(rdx, xmm1);
7844 __ fld_d(Operand(rsp, 0));
7845 __ addq(rsp, Immediate(kPointerSize));
7846 __ jmp(&loaded);
7847
7848 __ bind(&input_not_smi);
7849 // Check if input is a HeapNumber.
7850 __ Move(rbx, Factory::heap_number_map());
7851 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
7852 __ j(not_equal, &runtime_call);
7853 // Input is a HeapNumber. Push it on the FPU stack and load its
7854 // bits into rbx.
7855 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
7856 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
7857 __ movq(rdx, rbx);
7858 __ bind(&loaded);
7859 // ST[0] == double value
7860 // rbx = bits of double value.
7861 // rdx = also bits of double value.
7862 // Compute hash (h is 32 bits, bits are 64):
7863 // h = h0 = bits ^ (bits >> 32);
7864 // h ^= h >> 16;
7865 // h ^= h >> 8;
7866 // h = h & (cacheSize - 1);
7867 // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
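  // (Hedged C-style sketch of the same hash, where bits is the raw 64-bit
  // double value; not part of the generated code:
  //   uint32_t h0 = static_cast<uint32_t>(bits ^ (bits >> 32));
  //   uint32_t h = h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24);
  //   h &= TranscendentalCache::kCacheSize - 1;)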
7868 __ sar(rdx, Immediate(32));
7869 __ xorl(rdx, rbx);
7870 __ movl(rcx, rdx);
7871 __ movl(rax, rdx);
7872 __ movl(rdi, rdx);
7873 __ sarl(rdx, Immediate(8));
7874 __ sarl(rcx, Immediate(16));
7875 __ sarl(rax, Immediate(24));
7876 __ xorl(rcx, rdx);
7877 __ xorl(rax, rdi);
7878 __ xorl(rcx, rax);
7879 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
7880 __ andl(rcx, Immediate(TranscendentalCache::kCacheSize - 1));
7881 // ST[0] == double value.
7882 // rbx = bits of double value.
7883 // rcx = TranscendentalCache::hash(double value).
7884 __ movq(rax, ExternalReference::transcendental_cache_array_address());
7885 // rax points to cache array.
7886 __ movq(rax, Operand(rax, type_ * sizeof(TranscendentalCache::caches_[0])));
7887 // rax points to the cache for the type type_.
7888 // If NULL, the cache hasn't been initialized yet, so go through runtime.
7889 __ testq(rax, rax);
7890 __ j(zero, &runtime_call_clear_stack);
7891#ifdef DEBUG
7892 // Check that the layout of cache elements match expectations.
7893 { // NOLINT - doesn't like a single brace on a line.
7894 TranscendentalCache::Element test_elem[2];
7895 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
7896 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
7897 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
7898 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
7899 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
7900 // Two uint_32's and a pointer per element.
7901 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start));
7902 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
7903 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
7904 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
7905 }
7906#endif
7907 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
7908 __ addl(rcx, rcx);
7909 __ lea(rcx, Operand(rax, rcx, times_8, 0));
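  // (times_16 is not a valid addressing scale, so the index is doubled first
  // and then scaled by times_8 to reach the 16-byte cache entries.)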
7910 // Check if cache matches: Double value is stored in uint32_t[2] array.
7911 Label cache_miss;
7912 __ cmpq(rbx, Operand(rcx, 0));
7913 __ j(not_equal, &cache_miss);
7914 // Cache hit!
7915 __ movq(rax, Operand(rcx, 2 * kIntSize));
7916 __ fstp(0); // Clear FPU stack.
7917 __ ret(kPointerSize);
7918
7919 __ bind(&cache_miss);
7920 // Update cache with new value.
7921 Label nan_result;
7922 GenerateOperation(masm, &nan_result);
7923 __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
7924 __ movq(Operand(rcx, 0), rbx);
7925 __ movq(Operand(rcx, 2 * kIntSize), rax);
7926 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
7927 __ ret(kPointerSize);
7928
7929 __ bind(&runtime_call_clear_stack);
7930 __ fstp(0);
7931 __ bind(&runtime_call);
7932 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
7933
7934 __ bind(&nan_result);
7935 __ fstp(0); // Remove argument from FPU stack.
7936 __ LoadRoot(rax, Heap::kNanValueRootIndex);
7937 __ movq(Operand(rcx, 0), rbx);
7938 __ movq(Operand(rcx, 2 * kIntSize), rax);
7939 __ ret(kPointerSize);
7940}
7941
7942
7943Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
7944 switch (type_) {
7945 // Add more cases when necessary.
7946 case TranscendentalCache::SIN: return Runtime::kMath_sin;
7947 case TranscendentalCache::COS: return Runtime::kMath_cos;
7948 default:
7949 UNIMPLEMENTED();
7950 return Runtime::kAbort;
7951 }
7952}
7953
7954
7955void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm,
7956 Label* on_nan_result) {
7957 // Registers:
7958 // rbx: Bits of input double. Must be preserved.
7959 // rcx: Pointer to cache entry. Must be preserved.
7960 // st(0): Input double
7961 Label done;
7962 ASSERT(type_ == TranscendentalCache::SIN ||
7963 type_ == TranscendentalCache::COS);
7964 // More transcendental types can be added later.
7965
7966 // Both fsin and fcos require arguments in the range +/-2^63 and
7967 // return NaN for infinities and NaN. They can share all code except
7968 // the actual fsin/fcos operation.
7969 Label in_range;
7970 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
7971 // work. We must reduce it to the appropriate range.
7972 __ movq(rdi, rbx);
7973 // Move exponent and sign bits to low bits.
7974 __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
7975 // Remove sign bit.
7976  __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
7977 int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
7978 __ cmpl(rdi, Immediate(supported_exponent_limit));
7979 __ j(below, &in_range);
7980 // Check for infinity and NaN. Both return NaN for sin.
7981 __ cmpl(rdi, Immediate(0x7ff));
7982 __ j(equal, on_nan_result);
7983
7984 // Use fpmod to restrict argument to the range +/-2*PI.
7985 __ fldpi();
7986 __ fadd(0);
7987 __ fld(1);
7988 // FPU Stack: input, 2*pi, input.
7989 {
7990 Label no_exceptions;
7991 __ fwait();
7992 __ fnstsw_ax();
7993 // Clear if Illegal Operand or Zero Division exceptions are set.
7994 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word.
7995 __ j(zero, &no_exceptions);
7996 __ fnclex();
7997 __ bind(&no_exceptions);
7998 }
7999
8000 // Compute st(0) % st(1)
8001 {
8002 Label partial_remainder_loop;
8003 __ bind(&partial_remainder_loop);
8004 __ fprem1();
8005 __ fwait();
8006 __ fnstsw_ax();
8007 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word.
8008 // If C2 is set, computation only has partial result. Loop to
8009 // continue computation.
8010 __ j(not_zero, &partial_remainder_loop);
8011 }
8012 // FPU Stack: input, 2*pi, input % 2*pi
8013 __ fstp(2);
8014 // FPU Stack: input % 2*pi, 2*pi,
8015 __ fstp(0);
8016 // FPU Stack: input % 2*pi
8017 __ bind(&in_range);
8018 switch (type_) {
8019 case TranscendentalCache::SIN:
8020 __ fsin();
8021 break;
8022 case TranscendentalCache::COS:
8023 __ fcos();
8024 break;
8025 default:
8026 UNREACHABLE();
8027 }
8028 __ bind(&done);
8029}
8030
8031
Kristian Monsen25f61362010-05-21 11:50:48 +01008032// Get the integer part of a heap number.
8033// Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx.
Leon Clarked91b9f72010-01-27 17:25:45 +00008034void IntegerConvert(MacroAssembler* masm,
Kristian Monsen25f61362010-05-21 11:50:48 +01008035 Register result,
8036 Register source) {
8037 // Result may be rcx. If result and source are the same register, source will
8038 // be overwritten.
8039 ASSERT(!result.is(rdi) && !result.is(rbx));
8040 // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
8041 // cvttsd2si (32-bit version) directly.
8042 Register double_exponent = rbx;
8043 Register double_value = rdi;
8044 Label done, exponent_63_plus;
8045 // Get double and extract exponent.
8046 __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
8047 // Clear result preemptively, in case we need to return zero.
8048 __ xorl(result, result);
8049 __ movq(xmm0, double_value); // Save copy in xmm0 in case we need it there.
8050 // Double to remove sign bit, shift exponent down to least significant bits.
8051 // and subtract bias to get the unshifted, unbiased exponent.
8052 __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
8053 __ shr(double_exponent, Immediate(64 - HeapNumber::KExponentBits));
8054 __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
8055 // Check whether the exponent is too big for a 63 bit unsigned integer.
8056 __ cmpl(double_exponent, Immediate(63));
8057 __ j(above_equal, &exponent_63_plus);
8058 // Handle exponent range 0..62.
8059 __ cvttsd2siq(result, xmm0);
8060 __ jmp(&done);
8061
8062 __ bind(&exponent_63_plus);
8063 // Exponent negative or 63+.
8064 __ cmpl(double_exponent, Immediate(83));
8065 // If exponent negative or above 83, number contains no significant bits in
8066 // the range 0..2^31, so result is zero, and rcx already holds zero.
8067 __ j(above, &done);
8068
8069  // Exponent in range 63..83.
8070 // Mantissa * 2^exponent contains bits in the range 2^0..2^31, namely
8071 // the least significant exponent-52 bits.
8072
8073 // Negate low bits of mantissa if value is negative.
8074 __ addq(double_value, double_value); // Move sign bit to carry.
8075 __ sbbl(result, result); // And convert carry to -1 in result register.
8076  // If the value is negative, compute (bits - 1) ^ -1; otherwise (bits - 0) ^ 0.
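  // (Illustrative: when the value is negative, result is -1 and the low bits x
  // become (x - 1) ^ -1 = ~(x - 1) = -x; when positive, result is 0 and x is
  // left unchanged.)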
8077 __ addl(double_value, result);
8078 // Do xor in opposite directions depending on where we want the result
8079 // (depending on whether result is rcx or not).
8080
8081 if (result.is(rcx)) {
8082 __ xorl(double_value, result);
8083 // Left shift mantissa by (exponent - mantissabits - 1) to save the
8084 // bits that have positional values below 2^32 (the extra -1 comes from the
8085 // doubling done above to move the sign bit into the carry flag).
8086 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
8087 __ shll_cl(double_value);
8088 __ movl(result, double_value);
Leon Clarked91b9f72010-01-27 17:25:45 +00008089 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01008090 // As the then-branch, but move double-value to result before shifting.
8091 __ xorl(result, double_value);
8092 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
8093 __ shll_cl(result);
Leon Clarked91b9f72010-01-27 17:25:45 +00008094 }
Kristian Monsen25f61362010-05-21 11:50:48 +01008095
8096 __ bind(&done);
Leon Clarked91b9f72010-01-27 17:25:45 +00008097}
8098
8099
Leon Clarkee46be812010-01-19 14:06:41 +00008100void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
Leon Clarked91b9f72010-01-27 17:25:45 +00008101 Label slow, done;
Leon Clarkee46be812010-01-19 14:06:41 +00008102
Leon Clarked91b9f72010-01-27 17:25:45 +00008103 if (op_ == Token::SUB) {
8104 // Check whether the value is a smi.
8105 Label try_float;
8106 __ JumpIfNotSmi(rax, &try_float);
Steve Blocka7e24c12009-10-30 11:49:00 +00008107
Leon Clarked91b9f72010-01-27 17:25:45 +00008108 // Enter runtime system if the value of the smi is zero
8109 // to make sure that we switch between 0 and -0.
8110 // Also enter it if the value of the smi is Smi::kMinValue.
8111 __ SmiNeg(rax, rax, &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00008112
Leon Clarked91b9f72010-01-27 17:25:45 +00008113 // Either zero or Smi::kMinValue, neither of which become a smi when
8114 // negated.
8115 __ SmiCompare(rax, Smi::FromInt(0));
8116 __ j(not_equal, &slow);
8117 __ Move(rax, Factory::minus_zero_value());
8118 __ jmp(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00008119
Leon Clarked91b9f72010-01-27 17:25:45 +00008120 // Try floating point case.
8121 __ bind(&try_float);
8122 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
8123 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
8124 __ j(not_equal, &slow);
8125 // Operand is a float, negate its value by flipping sign bit.
8126 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
8127 __ movq(kScratchRegister, Immediate(0x01));
8128 __ shl(kScratchRegister, Immediate(63));
8129 __ xor_(rdx, kScratchRegister); // Flip sign.
8130 // rdx is value to store.
8131 if (overwrite_) {
8132 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx);
8133 } else {
8134 __ AllocateHeapNumber(rcx, rbx, &slow);
8135 // rcx: allocated 'empty' number
8136 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
8137 __ movq(rax, rcx);
8138 }
8139 } else if (op_ == Token::BIT_NOT) {
8140 // Check if the operand is a heap number.
8141 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
8142 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
8143 __ j(not_equal, &slow);
8144
8145    // Convert the heap number in rax to an untagged integer, also in rax.
Kristian Monsen25f61362010-05-21 11:50:48 +01008146 IntegerConvert(masm, rax, rax);
Leon Clarked91b9f72010-01-27 17:25:45 +00008147
Kristian Monsen25f61362010-05-21 11:50:48 +01008148 // Do the bitwise operation and smi tag the result.
8149 __ notl(rax);
8150 __ Integer32ToSmi(rax, rax);
Leon Clarked91b9f72010-01-27 17:25:45 +00008151 }
8152
8153 // Return from the stub.
8154 __ bind(&done);
8155 __ StubReturn(1);
8156
8157 // Handle the slow case by jumping to the JavaScript builtin.
Steve Blocka7e24c12009-10-30 11:49:00 +00008158 __ bind(&slow);
8159 __ pop(rcx); // pop return address
8160 __ push(rax);
8161 __ push(rcx); // push return address
Leon Clarked91b9f72010-01-27 17:25:45 +00008162 switch (op_) {
8163 case Token::SUB:
8164 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
8165 break;
8166 case Token::BIT_NOT:
8167 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
8168 break;
8169 default:
8170 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00008171 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008172}
8173
8174
Leon Clarke4515c472010-02-03 11:58:03 +00008175void RegExpExecStub::Generate(MacroAssembler* masm) {
8176  // Jump directly to the runtime if native RegExp was not selected at compile
8177  // time, or if the regexp entry in generated code has been turned off by the
8178  // runtime switch.
Steve Block6ded16b2010-05-10 14:33:55 +01008179#ifdef V8_INTERPRETED_REGEXP
8180 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
8181#else // V8_INTERPRETED_REGEXP
Leon Clarke4515c472010-02-03 11:58:03 +00008182 if (!FLAG_regexp_entry_native) {
Steve Block6ded16b2010-05-10 14:33:55 +01008183 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
Leon Clarke4515c472010-02-03 11:58:03 +00008184 return;
8185 }
8186
8187 // Stack frame on entry.
8188  // rsp[0]: return address
8189  // rsp[8]: last_match_info (expected JSArray)
8190  // rsp[16]: previous index
8191  // rsp[24]: subject string
8192  // rsp[32]: JSRegExp object
8193
8194 static const int kLastMatchInfoOffset = 1 * kPointerSize;
8195 static const int kPreviousIndexOffset = 2 * kPointerSize;
8196 static const int kSubjectOffset = 3 * kPointerSize;
8197 static const int kJSRegExpOffset = 4 * kPointerSize;
8198
8199 Label runtime;
8200
8201 // Ensure that a RegExp stack is allocated.
8202 ExternalReference address_of_regexp_stack_memory_address =
8203 ExternalReference::address_of_regexp_stack_memory_address();
8204 ExternalReference address_of_regexp_stack_memory_size =
8205 ExternalReference::address_of_regexp_stack_memory_size();
8206 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
8207 __ movq(kScratchRegister, Operand(kScratchRegister, 0));
8208 __ testq(kScratchRegister, kScratchRegister);
8209 __ j(zero, &runtime);
8210
8211
8212 // Check that the first argument is a JSRegExp object.
8213 __ movq(rax, Operand(rsp, kJSRegExpOffset));
8214 __ JumpIfSmi(rax, &runtime);
8215 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
8216 __ j(not_equal, &runtime);
8217 // Check that the RegExp has been compiled (data contains a fixed array).
8218 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
8219 if (FLAG_debug_code) {
8220 Condition is_smi = masm->CheckSmi(rcx);
8221 __ Check(NegateCondition(is_smi),
8222 "Unexpected type for RegExp data, FixedArray expected");
8223 __ CmpObjectType(rcx, FIXED_ARRAY_TYPE, kScratchRegister);
8224 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
8225 }
8226
8227 // rcx: RegExp data (FixedArray)
8228 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
8229 __ movq(rbx, FieldOperand(rcx, JSRegExp::kDataTagOffset));
8230 __ SmiCompare(rbx, Smi::FromInt(JSRegExp::IRREGEXP));
8231 __ j(not_equal, &runtime);
8232
8233 // rcx: RegExp data (FixedArray)
8234 // Check that the number of captures fit in the static offsets vector buffer.
8235 __ movq(rdx, FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
8236 // Calculate number of capture registers (number_of_captures + 1) * 2.
8237 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rdx, 1);
8238 __ addq(rdx, Immediate(2)); // rdx was number_of_captures * 2.
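  // (For illustration: a regexp with 2 capture groups needs (2 + 1) * 2 = 6
  // registers, i.e. start and end offsets for the whole match and each group.)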
8239 // Check that the static offsets vector buffer is large enough.
8240 __ cmpq(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize));
8241 __ j(above, &runtime);
8242
8243 // rcx: RegExp data (FixedArray)
8244 // rdx: Number of capture registers
8245 // Check that the second argument is a string.
8246 __ movq(rax, Operand(rsp, kSubjectOffset));
8247 __ JumpIfSmi(rax, &runtime);
8248 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
8249 __ j(NegateCondition(is_string), &runtime);
8250 // Get the length of the string to rbx.
Steve Block6ded16b2010-05-10 14:33:55 +01008251 __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00008252
Steve Block6ded16b2010-05-10 14:33:55 +01008253 // rbx: Length of subject string as smi
Leon Clarke4515c472010-02-03 11:58:03 +00008254 // rcx: RegExp data (FixedArray)
8255 // rdx: Number of capture registers
8256 // Check that the third argument is a positive smi less than the string
Steve Block6ded16b2010-05-10 14:33:55 +01008257 // length. A negative value will be greater (unsigned comparison).
Leon Clarke4515c472010-02-03 11:58:03 +00008258 __ movq(rax, Operand(rsp, kPreviousIndexOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01008259 __ JumpIfNotSmi(rax, &runtime);
8260 __ SmiCompare(rax, rbx);
8261 __ j(above_equal, &runtime);
Leon Clarke4515c472010-02-03 11:58:03 +00008262
8263 // rcx: RegExp data (FixedArray)
8264 // rdx: Number of capture registers
8265 // Check that the fourth object is a JSArray object.
8266 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
8267 __ JumpIfSmi(rax, &runtime);
8268 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister);
8269 __ j(not_equal, &runtime);
8270 // Check that the JSArray is in fast case.
8271 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
8272 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
8273 __ Cmp(rax, Factory::fixed_array_map());
8274 __ j(not_equal, &runtime);
8275 // Check that the last match info has space for the capture registers and the
8276 // additional information. Ensure no overflow in add.
8277 ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
8278 __ movl(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
8279 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
8280 __ cmpl(rdx, rax);
8281 __ j(greater, &runtime);
8282
8283  // rcx: RegExp data (FixedArray)
8284 // Check the representation and encoding of the subject string.
8285 Label seq_string, seq_two_byte_string, check_code;
8286 const int kStringRepresentationEncodingMask =
8287 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
8288 __ movq(rax, Operand(rsp, kSubjectOffset));
8289 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
8290 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
8291 __ andb(rbx, Immediate(kStringRepresentationEncodingMask));
8292 // First check for sequential string.
8293 ASSERT_EQ(0, kStringTag);
8294 ASSERT_EQ(0, kSeqStringTag);
8295 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
8296 __ j(zero, &seq_string);
8297
8298 // Check for flat cons string.
8299 // A flat cons string is a cons string where the second part is the empty
8300 // string. In that case the subject string is just the first part of the cons
8301 // string. Also in this case the first part of the cons string is known to be
8302 // a sequential string or an external string.
Steve Block6ded16b2010-05-10 14:33:55 +01008303 __ andb(rbx, Immediate(kStringRepresentationMask));
8304 __ cmpb(rbx, Immediate(kConsStringTag));
Leon Clarke4515c472010-02-03 11:58:03 +00008305 __ j(not_equal, &runtime);
8306 __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset));
8307 __ Cmp(rdx, Factory::empty_string());
8308 __ j(not_equal, &runtime);
8309 __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset));
8310 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
8311 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
8312 ASSERT_EQ(0, kSeqStringTag);
8313 __ testb(rbx, Immediate(kStringRepresentationMask));
8314 __ j(not_zero, &runtime);
8315 __ andb(rbx, Immediate(kStringRepresentationEncodingMask));
8316
8317 __ bind(&seq_string);
8318  // rax: subject string (sequential, either ascii or two byte)
8319  // rbx: subject string type & kStringRepresentationEncodingMask
8320 // rcx: RegExp data (FixedArray)
8321 // Check that the irregexp code has been generated for an ascii string. If
8322 // it has, the field contains a code object otherwise it contains the hole.
Steve Block6ded16b2010-05-10 14:33:55 +01008323 const int kSeqTwoByteString = kStringTag | kSeqStringTag | kTwoByteStringTag;
8324 __ cmpb(rbx, Immediate(kSeqTwoByteString));
Leon Clarke4515c472010-02-03 11:58:03 +00008325 __ j(equal, &seq_two_byte_string);
8326 if (FLAG_debug_code) {
8327 __ cmpb(rbx, Immediate(kStringTag | kSeqStringTag | kAsciiStringTag));
8328 __ Check(equal, "Expected sequential ascii string");
8329 }
8330 __ movq(r12, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset));
8331 __ Set(rdi, 1); // Type is ascii.
8332 __ jmp(&check_code);
8333
8334 __ bind(&seq_two_byte_string);
8335 // rax: subject string
8336 // rcx: RegExp data (FixedArray)
8337 __ movq(r12, FieldOperand(rcx, JSRegExp::kDataUC16CodeOffset));
8338 __ Set(rdi, 0); // Type is two byte.
8339
8340 __ bind(&check_code);
8341 // Check that the irregexp code has been generated for the actual string
8342  // encoding. If it has, the field contains a code object; otherwise it contains
8343 // the hole.
8344 __ CmpObjectType(r12, CODE_TYPE, kScratchRegister);
8345 __ j(not_equal, &runtime);
8346
8347 // rax: subject string
8348 // rdi: encoding of subject string (1 if ascii, 0 if two_byte);
8349 // r12: code
8350 // Load used arguments before starting to push arguments for call to native
8351 // RegExp code to avoid handling changing stack height.
8352 __ movq(rbx, Operand(rsp, kPreviousIndexOffset));
8353 __ SmiToInteger64(rbx, rbx); // Previous index from smi.
8354
8355 // rax: subject string
8356 // rbx: previous index
8357 // rdi: encoding of subject string (1 if ascii 0 if two_byte);
8358 // r12: code
8359 // All checks done. Now push arguments for native regexp code.
8360 __ IncrementCounter(&Counters::regexp_entry_native, 1);
8361
8362 // rsi is caller save on Windows and used to pass parameter on Linux.
8363 __ push(rsi);
8364
8365 static const int kRegExpExecuteArguments = 7;
8366 __ PrepareCallCFunction(kRegExpExecuteArguments);
8367 int argument_slots_on_stack =
8368 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
8369
8370 // Argument 7: Indicate that this is a direct call from JavaScript.
8371 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
8372 Immediate(1));
8373
8374 // Argument 6: Start (high end) of backtracking stack memory area.
8375 __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
8376 __ movq(r9, Operand(kScratchRegister, 0));
8377 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
8378 __ addq(r9, Operand(kScratchRegister, 0));
8379 // Argument 6 passed in r9 on Linux and on the stack on Windows.
8380#ifdef _WIN64
8381 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9);
8382#endif
8383
8384 // Argument 5: static offsets vector buffer.
8385 __ movq(r8, ExternalReference::address_of_static_offsets_vector());
8386 // Argument 5 passed in r8 on Linux and on the stack on Windows.
8387#ifdef _WIN64
8388 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8);
8389#endif
8390
8391 // First four arguments are passed in registers on both Linux and Windows.
8392#ifdef _WIN64
8393 Register arg4 = r9;
8394 Register arg3 = r8;
8395 Register arg2 = rdx;
8396 Register arg1 = rcx;
8397#else
8398 Register arg4 = rcx;
8399 Register arg3 = rdx;
8400 Register arg2 = rsi;
8401 Register arg1 = rdi;
8402#endif
8403
8404  // Keep track of aliasing between argX defined above and the registers used.
8405 // rax: subject string
8406 // rbx: previous index
8407 // rdi: encoding of subject string (1 if ascii 0 if two_byte);
8408 // r12: code
8409
8410 // Argument 4: End of string data
8411 // Argument 3: Start of string data
8412 Label setup_two_byte, setup_rest;
8413 __ testb(rdi, rdi);
Steve Block6ded16b2010-05-10 14:33:55 +01008414 __ movq(rdi, FieldOperand(rax, String::kLengthOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00008415 __ j(zero, &setup_two_byte);
Steve Block6ded16b2010-05-10 14:33:55 +01008416 __ SmiToInteger32(rdi, rdi);
Leon Clarke4515c472010-02-03 11:58:03 +00008417 __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize));
8418 __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize));
8419 __ jmp(&setup_rest);
8420 __ bind(&setup_two_byte);
Steve Block6ded16b2010-05-10 14:33:55 +01008421 __ SmiToInteger32(rdi, rdi);
Leon Clarke4515c472010-02-03 11:58:03 +00008422 __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize));
8423 __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize));
8424
8425 __ bind(&setup_rest);
8426 // Argument 2: Previous index.
8427 __ movq(arg2, rbx);
8428
8429 // Argument 1: Subject string.
8430 __ movq(arg1, rax);
8431
8432 // Locate the code entry and call it.
8433 __ addq(r12, Immediate(Code::kHeaderSize - kHeapObjectTag));
8434 __ CallCFunction(r12, kRegExpExecuteArguments);
8435
8436 // rsi is caller save, as it is used to pass parameter.
8437 __ pop(rsi);
8438
8439 // Check the result.
8440 Label success;
8441 __ cmpq(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
8442 __ j(equal, &success);
8443 Label failure;
8444 __ cmpq(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
8445 __ j(equal, &failure);
8446 __ cmpq(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
8447  // If it is not exception, it can only be retry. Handle that in the runtime system.
8448 __ j(not_equal, &runtime);
8449  // The result must now be exception. If there is no pending exception yet, a
8450  // stack overflow (on the backtrack stack) was detected in the RegExp code,
8451  // but the exception has not been created. Handle that in the runtime system.
Steve Block6ded16b2010-05-10 14:33:55 +01008452 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Leon Clarke4515c472010-02-03 11:58:03 +00008453 ExternalReference pending_exception_address(Top::k_pending_exception_address);
8454 __ movq(kScratchRegister, pending_exception_address);
8455 __ Cmp(kScratchRegister, Factory::the_hole_value());
8456 __ j(equal, &runtime);
8457 __ bind(&failure);
8458 // For failure and exception return null.
8459 __ Move(rax, Factory::null_value());
8460 __ ret(4 * kPointerSize);
8461
8462 // Load RegExp data.
8463 __ bind(&success);
8464 __ movq(rax, Operand(rsp, kJSRegExpOffset));
8465 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
8466 __ movq(rdx, FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
8467 // Calculate number of capture registers (number_of_captures + 1) * 2.
8468 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rdx, 1);
8469 __ addq(rdx, Immediate(2)); // rdx was number_of_captures * 2.
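  // E.g. a RegExp with two capture groups uses (2 + 1) * 2 == 6 registers:
  // start/end offsets of the whole match plus start/end offsets per group.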
8470
8471 // rdx: Number of capture registers
8472 // Load last_match_info which is still known to be a fast case JSArray.
8473 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
8474 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset));
8475
8476 // rbx: last_match_info backing store (FixedArray)
8477 // rdx: number of capture registers
8478 // Store the capture count.
8479 __ Integer32ToSmi(kScratchRegister, rdx);
8480 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
8481 kScratchRegister);
8482 // Store last subject and last input.
8483 __ movq(rax, Operand(rsp, kSubjectOffset));
8484 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
8485 __ movq(rcx, rbx);
8486 __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi);
8487 __ movq(rax, Operand(rsp, kSubjectOffset));
8488 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
8489 __ movq(rcx, rbx);
8490 __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);
8491
8492 // Get the static offsets vector filled by the native regexp code.
8493 __ movq(rcx, ExternalReference::address_of_static_offsets_vector());
8494
8495 // rbx: last_match_info backing store (FixedArray)
8496 // rcx: offsets vector
8497 // rdx: number of capture registers
8498 Label next_capture, done;
Leon Clarke4515c472010-02-03 11:58:03 +00008499 // Capture register counter starts from number of capture registers and
8500 // counts down until wrapping after zero.
8501 __ bind(&next_capture);
8502 __ subq(rdx, Immediate(1));
8503 __ j(negative, &done);
8504 // Read the value from the static offsets vector buffer and make it a smi.
8505 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
8506 __ Integer32ToSmi(rdi, rdi, &runtime);
Leon Clarke4515c472010-02-03 11:58:03 +00008507 // Store the smi value in the last match info.
8508 __ movq(FieldOperand(rbx,
8509 rdx,
8510 times_pointer_size,
8511 RegExpImpl::kFirstCaptureOffset),
8512 rdi);
8513 __ jmp(&next_capture);
8514 __ bind(&done);
8515
8516 // Return last match info.
8517 __ movq(rax, Operand(rsp, kLastMatchInfoOffset));
8518 __ ret(4 * kPointerSize);
8519
8520 // Do the runtime call to execute the regexp.
8521 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +01008522 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
8523#endif // V8_INTERPRETED_REGEXP
8524}
8525
8526
8527void NumberToStringStub::GenerateConvertHashCodeToIndex(MacroAssembler* masm,
8528 Register hash,
8529 Register mask) {
8530 __ and_(hash, mask);
8531 // Each entry in the string cache consists of two pointer-sized fields,
8532 // but the times_twice_pointer_size (multiplication by 16) scale factor
8533 // is not supported by the addressing modes on the x64 platform,
8534 // so we have to premultiply the entry index before the lookup.
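  // E.g. with 8-byte pointers each entry occupies 16 bytes, so a masked
  // hash of 3 becomes the byte offset 3 << 4 == 48 (kPointerSizeLog2 + 1
  // is 4 on x64), ready for the times_1 lookups in
  // GenerateLookupNumberStringCache.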
8535 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
8536}
8537
8538
8539void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
8540 Register object,
8541 Register result,
8542 Register scratch1,
8543 Register scratch2,
8544 bool object_is_smi,
8545 Label* not_found) {
8546 // Use of registers. Register result is used as a temporary.
8547 Register number_string_cache = result;
8548 Register mask = scratch1;
8549 Register scratch = scratch2;
8550
8551 // Load the number string cache.
8552 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
8553
8554 // Make the hash mask from the length of the number string cache. It
8555 // contains two elements (number and string) for each cache entry.
8556 __ movl(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
8557 __ shrl(mask, Immediate(1)); // Divide length by two (length is not a smi).
8558 __ subl(mask, Immediate(1)); // Make mask.
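  // E.g. a number string cache backing store of length 128 holds 64
  // (number, string) entries and yields a mask of 63 (0x3F).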
8559
8560 // Calculate the entry in the number string cache. The hash value in the
8561 // number string cache for smis is just the smi value, and the hash for
8562 // doubles is the xor of the upper and lower words. See
8563 // Heap::GetNumberStringCache.
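  // E.g. for the double 1.0 (bit pattern 0x3FF0000000000000) the hash is
  // 0x3FF00000 ^ 0x00000000 == 0x3FF00000 before masking.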
8564 Label is_smi;
8565 Label load_result_from_cache;
8566 if (!object_is_smi) {
8567 __ JumpIfSmi(object, &is_smi);
8568 __ CheckMap(object, Factory::heap_number_map(), not_found, true);
8569
8570 ASSERT_EQ(8, kDoubleSize);
8571 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
8572 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
8573 GenerateConvertHashCodeToIndex(masm, scratch, mask);
8574
8575 Register index = scratch;
8576 Register probe = mask;
8577 __ movq(probe,
8578 FieldOperand(number_string_cache,
8579 index,
8580 times_1,
8581 FixedArray::kHeaderSize));
8582 __ JumpIfSmi(probe, not_found);
8583 ASSERT(CpuFeatures::IsSupported(SSE2));
8584 CpuFeatures::Scope fscope(SSE2);
8585 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
8586 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
8587 __ comisd(xmm0, xmm1);
8588 __ j(parity_even, not_found); // Bail out if NaN is involved.
8589 __ j(not_equal, not_found); // The cache did not contain this value.
8590 __ jmp(&load_result_from_cache);
8591 }
8592
8593 __ bind(&is_smi);
8594 __ movq(scratch, object);
8595 __ SmiToInteger32(scratch, scratch);
8596 GenerateConvertHashCodeToIndex(masm, scratch, mask);
8597
8598 Register index = scratch;
8599 // Check if the entry is the smi we are looking for.
8600 __ cmpq(object,
8601 FieldOperand(number_string_cache,
8602 index,
8603 times_1,
8604 FixedArray::kHeaderSize));
8605 __ j(not_equal, not_found);
8606
8607 // Get the result from the cache.
8608 __ bind(&load_result_from_cache);
8609 __ movq(result,
8610 FieldOperand(number_string_cache,
8611 index,
8612 times_1,
8613 FixedArray::kHeaderSize + kPointerSize));
8614 __ IncrementCounter(&Counters::number_to_string_native, 1);
8615}
8616
8617
8618void NumberToStringStub::Generate(MacroAssembler* masm) {
8619 Label runtime;
8620
8621 __ movq(rbx, Operand(rsp, kPointerSize));
8622
8623 // Generate code to lookup number in the number string cache.
8624 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime);
8625 __ ret(1 * kPointerSize);
8626
8627 __ bind(&runtime);
8628 // Handle number to string in the runtime system if not found in the cache.
8629 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
8630}
8631
8632
8633void RecordWriteStub::Generate(MacroAssembler* masm) {
8634 masm->RecordWriteHelper(object_, addr_, scratch_);
8635 masm->ret(0);
8636}
8637
8638
8639static int NegativeComparisonResult(Condition cc) {
8640 ASSERT(cc != equal);
8641 ASSERT((cc == less) || (cc == less_equal)
8642 || (cc == greater) || (cc == greater_equal));
8643 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
Leon Clarke4515c472010-02-03 11:58:03 +00008644}
8645
8646
Steve Blocka7e24c12009-10-30 11:49:00 +00008647void CompareStub::Generate(MacroAssembler* masm) {
8648 Label call_builtin, done;
Steve Block6ded16b2010-05-10 14:33:55 +01008649 // The compare stub returns a positive, negative, or zero 64-bit integer
8650 // value in rax, corresponding to result of comparing the two inputs.
Steve Blocka7e24c12009-10-30 11:49:00 +00008651 // NOTICE! This code is only reached after a smi-fast-case check, so
8652 // it is certain that at least one operand isn't a smi.
8653
Steve Block6ded16b2010-05-10 14:33:55 +01008654 // Two identical objects are equal unless they are both NaN or undefined.
8655 {
8656 Label not_identical;
8657 __ cmpq(rax, rdx);
8658 __ j(not_equal, &not_identical);
Steve Blocka7e24c12009-10-30 11:49:00 +00008659
Steve Block6ded16b2010-05-10 14:33:55 +01008660 if (cc_ != equal) {
8661 // Check for undefined. undefined OP undefined is false even though
8662 // undefined == undefined.
8663 Label check_for_nan;
8664 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
8665 __ j(not_equal, &check_for_nan);
8666 __ Set(rax, NegativeComparisonResult(cc_));
8667 __ ret(0);
8668 __ bind(&check_for_nan);
8669 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008670
Steve Block6ded16b2010-05-10 14:33:55 +01008671 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
8672 // so we do the second-best thing: test it ourselves.
8673 // Note: if cc_ != equal, never_nan_nan_ is not used.
8674 if (never_nan_nan_ && (cc_ == equal)) {
8675 __ Set(rax, EQUAL);
8676 __ ret(0);
8677 } else {
8678 Label return_equal;
8679 Label heap_number;
8680 // If it's not a heap number, then return equal.
8681 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
8682 Factory::heap_number_map());
8683 __ j(equal, &heap_number);
8684 __ bind(&return_equal);
8685 __ Set(rax, EQUAL);
8686 __ ret(0);
Steve Blockd0582a62009-12-15 09:54:21 +00008687
Steve Block6ded16b2010-05-10 14:33:55 +01008688 __ bind(&heap_number);
8689 // It is a heap number, so return non-equal if it's NaN and equal if
8690 // it's not NaN.
8691 // The representation of NaN values has all exponent bits (52..62) set,
8692 // and not all mantissa bits (0..51) clear.
8693 // We only allow QNaNs, which have bit 51 set (which also rules out
8694 // the value being Infinity).
8695
8696 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
8697 // all bits in the mask are set. We only need to check the word
8698 // that contains the exponent and high bit of the mantissa.
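      // Worked example, assuming kQuietNaNHighBitsMask covers bits 62..51
      // (0x7FF80000 in the high word): the canonical quiet NaN high word
      // 0x7FF80000 doubles to 0xFFF00000 and compares above_equal, while the
      // largest finite double's high word 0x7FEFFFFF doubles to 0xFFDFFFFE
      // and does not.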
8699 ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u);
8700 __ movl(rdx, FieldOperand(rdx, HeapNumber::kExponentOffset));
8701 __ xorl(rax, rax);
8702 __ addl(rdx, rdx); // Shift value and mask so mask applies to top bits.
8703 __ cmpl(rdx, Immediate(kQuietNaNHighBitsMask << 1));
8704 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00008705 __ setcc(above_equal, rax);
8706 __ ret(0);
Steve Block6ded16b2010-05-10 14:33:55 +01008707 } else {
8708 Label nan;
8709 __ j(above_equal, &nan);
8710 __ Set(rax, EQUAL);
8711 __ ret(0);
8712 __ bind(&nan);
8713 __ Set(rax, NegativeComparisonResult(cc_));
8714 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +00008715 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008716 }
8717
Steve Block6ded16b2010-05-10 14:33:55 +01008718 __ bind(&not_identical);
8719 }
8720
8721 if (cc_ == equal) { // Both strict and non-strict.
8722 Label slow; // Fallthrough label.
8723
Steve Blocka7e24c12009-10-30 11:49:00 +00008724 // If we're doing a strict equality comparison, we don't have to do
8725 // type conversion, so we generate code to do fast comparison for objects
8726 // and oddballs. Non-smi numbers and strings still go through the usual
8727 // slow-case code.
8728 if (strict_) {
8729 // If either is a Smi (we know that not both are), then they can only
8730 // be equal if the other is a HeapNumber. If so, use the slow case.
8731 {
8732 Label not_smis;
8733 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
8734
8735 // Check if the non-smi operand is a heap number.
8736 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
8737 Factory::heap_number_map());
8738 // If heap number, handle it in the slow case.
8739 __ j(equal, &slow);
8740 // Return non-equal. ebx (the lower half of rbx) is not zero.
8741 __ movq(rax, rbx);
8742 __ ret(0);
8743
8744 __ bind(&not_smis);
8745 }
8746
8747 // If either operand is a JSObject or an oddball value, then they are not
8748 // equal since their pointers are different.
8749 // There is no test for undetectability in strict equality.
8750
8751 // If the first object is a JS object, we have done pointer comparison.
8752 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
8753 Label first_non_object;
8754 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
8755 __ j(below, &first_non_object);
8756 // Return non-zero (eax, the low 32 bits of rax, is not zero).
8757 Label return_not_equal;
8758 ASSERT(kHeapObjectTag != 0);
8759 __ bind(&return_not_equal);
8760 __ ret(0);
8761
8762 __ bind(&first_non_object);
8763 // Check for oddballs: true, false, null, undefined.
8764 __ CmpInstanceType(rcx, ODDBALL_TYPE);
8765 __ j(equal, &return_not_equal);
8766
8767 __ CmpObjectType(rdx, FIRST_JS_OBJECT_TYPE, rcx);
8768 __ j(above_equal, &return_not_equal);
8769
8770 // Check for oddballs: true, false, null, undefined.
8771 __ CmpInstanceType(rcx, ODDBALL_TYPE);
8772 __ j(equal, &return_not_equal);
8773
8774 // Fall through to the general case.
8775 }
8776 __ bind(&slow);
8777 }
8778
8779 // Push arguments below the return address to prepare jump to builtin.
8780 __ pop(rcx);
8781 __ push(rax);
8782 __ push(rdx);
8783 __ push(rcx);
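  // The stack now holds, from the top: return address, rdx, rax, so the
  // ret(2 * kPointerSize) exits below drop both pushed arguments.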
8784
Steve Block6ded16b2010-05-10 14:33:55 +01008785 // Generate the number comparison code.
8786 if (include_number_compare_) {
8787 Label non_number_comparison;
8788 Label unordered;
8789 FloatingPointHelper::LoadFloatOperand(masm, rdx, xmm0,
8790 &non_number_comparison);
8791 FloatingPointHelper::LoadFloatOperand(masm, rax, xmm1,
8792 &non_number_comparison);
Steve Blocka7e24c12009-10-30 11:49:00 +00008793
Steve Block6ded16b2010-05-10 14:33:55 +01008794 __ comisd(xmm0, xmm1);
Steve Blocka7e24c12009-10-30 11:49:00 +00008795
Steve Block6ded16b2010-05-10 14:33:55 +01008796 // Don't base result on EFLAGS when a NaN is involved.
8797 __ j(parity_even, &unordered);
8798 // Return a result of -1, 0, or 1, based on EFLAGS.
8799 __ movq(rax, Immediate(0)); // equal
8800 __ movq(rcx, Immediate(1));
8801 __ cmovq(above, rax, rcx);
8802 __ movq(rcx, Immediate(-1));
8803 __ cmovq(below, rax, rcx);
8804 __ ret(2 * kPointerSize); // rax, rdx were pushed
Steve Blocka7e24c12009-10-30 11:49:00 +00008805
Steve Block6ded16b2010-05-10 14:33:55 +01008806 // If one of the numbers was NaN, then the result is always false.
8807 // The cc is never not-equal.
8808 __ bind(&unordered);
8809 ASSERT(cc_ != not_equal);
8810 if (cc_ == less || cc_ == less_equal) {
8811 __ Set(rax, 1);
8812 } else {
8813 __ Set(rax, -1);
8814 }
8815 __ ret(2 * kPointerSize); // rax, rdx were pushed
Steve Blocka7e24c12009-10-30 11:49:00 +00008816
Steve Block6ded16b2010-05-10 14:33:55 +01008817 // The number comparison code did not provide a valid result.
8818 __ bind(&non_number_comparison);
8819 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008820
8821 // Fast negative check for symbol-to-symbol equality.
Leon Clarkee46be812010-01-19 14:06:41 +00008822 Label check_for_strings;
Steve Blocka7e24c12009-10-30 11:49:00 +00008823 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00008824 BranchIfNonSymbol(masm, &check_for_strings, rax, kScratchRegister);
8825 BranchIfNonSymbol(masm, &check_for_strings, rdx, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00008826
8827 // We've already checked for object identity, so if both operands
8828 // are symbols they aren't equal. Register eax (not rax) already holds a
8829 // non-zero value, which indicates not equal, so just return.
8830 __ ret(2 * kPointerSize);
8831 }
8832
Leon Clarkee46be812010-01-19 14:06:41 +00008833 __ bind(&check_for_strings);
8834
8835 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &call_builtin);
8836
8837 // Inline comparison of ASCII strings.
8838 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
8839 rdx,
8840 rax,
8841 rcx,
8842 rbx,
8843 rdi,
8844 r8);
8845
8846#ifdef DEBUG
8847 __ Abort("Unexpected fall-through from string comparison");
8848#endif
8849
Steve Blocka7e24c12009-10-30 11:49:00 +00008850 __ bind(&call_builtin);
8851 // Must swap the argument order.
8852 __ pop(rcx);
8853 __ pop(rdx);
8854 __ pop(rax);
8855 __ push(rdx);
8856 __ push(rax);
8857
8858 // Figure out which native to call and setup the arguments.
8859 Builtins::JavaScript builtin;
8860 if (cc_ == equal) {
8861 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
8862 } else {
8863 builtin = Builtins::COMPARE;
Steve Block6ded16b2010-05-10 14:33:55 +01008864 __ Push(Smi::FromInt(NegativeComparisonResult(cc_)));
Steve Blocka7e24c12009-10-30 11:49:00 +00008865 }
8866
8867 // Restore return address on the stack.
8868 __ push(rcx);
8869
8870 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
8871 // tagged as a small integer.
8872 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
8873}
8874
8875
8876void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
8877 Label* label,
8878 Register object,
8879 Register scratch) {
8880 __ JumpIfSmi(object, label);
8881 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
8882 __ movzxbq(scratch,
8883 FieldOperand(scratch, Map::kInstanceTypeOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008884 // Ensure that no non-strings have the symbol bit set.
8885 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
8886 ASSERT(kSymbolTag != 0);
8887 __ testb(scratch, Immediate(kIsSymbolMask));
8888 __ j(zero, label);
Steve Blocka7e24c12009-10-30 11:49:00 +00008889}
8890
8891
8892// Call the function just below TOS on the stack with the given
8893// arguments. The receiver is the TOS.
8894void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00008895 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00008896 int position) {
8897 // Push the arguments ("left-to-right") on the stack.
8898 int arg_count = args->length();
8899 for (int i = 0; i < arg_count; i++) {
8900 Load(args->at(i));
8901 }
8902
8903 // Record the position for debugging purposes.
8904 CodeForSourcePosition(position);
8905
8906 // Use the shared code stub to call the function.
8907 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00008908 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00008909 Result answer = frame_->CallStub(&call_function, arg_count + 1);
8910 // Restore context and replace function on the stack with the
8911 // result of the stub invocation.
8912 frame_->RestoreContextRegister();
8913 frame_->SetElementAt(0, &answer);
8914}
8915
8916
8917void InstanceofStub::Generate(MacroAssembler* masm) {
8918 // Implements "value instanceof function" operator.
8919 // Expected input state:
8920 // rsp[0] : return address
8921 // rsp[1] : function pointer
8922 // rsp[2] : value
Kristian Monsen25f61362010-05-21 11:50:48 +01008923 // Returns a bitwise zero to indicate that the value
8924 // is an instance of the function and anything else to
8925 // indicate that the value is not an instance.
Steve Blocka7e24c12009-10-30 11:49:00 +00008926
8927 // Get the object - go slow case if it's a smi.
8928 Label slow;
8929 __ movq(rax, Operand(rsp, 2 * kPointerSize));
8930 __ JumpIfSmi(rax, &slow);
8931
8932 // Check that the left hand is a JS object. Leave its map in rax.
8933 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax);
8934 __ j(below, &slow);
8935 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE);
8936 __ j(above, &slow);
8937
8938 // Get the prototype of the function.
8939 __ movq(rdx, Operand(rsp, 1 * kPointerSize));
Kristian Monsen25f61362010-05-21 11:50:48 +01008940 // rdx is function, rax is map.
8941
8942 // Look up the function and the map in the instanceof cache.
8943 Label miss;
8944 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
8945 __ j(not_equal, &miss);
8946 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
8947 __ j(not_equal, &miss);
8948 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
8949 __ ret(2 * kPointerSize);
8950
8951 __ bind(&miss);
Steve Blocka7e24c12009-10-30 11:49:00 +00008952 __ TryGetFunctionPrototype(rdx, rbx, &slow);
8953
8954 // Check that the function prototype is a JS object.
8955 __ JumpIfSmi(rbx, &slow);
8956 __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
8957 __ j(below, &slow);
8958 __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
8959 __ j(above, &slow);
8960
Kristian Monsen25f61362010-05-21 11:50:48 +01008961 // Register mapping:
8962 // rax is object map.
8963 // rdx is function.
8964 // rbx is function prototype.
8965 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
8966 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
8967
Steve Blocka7e24c12009-10-30 11:49:00 +00008968 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
8969
8970 // Loop through the prototype chain looking for the function prototype.
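  // I.e. follow the value's prototype chain via each map's prototype field;
  // reaching the function's prototype (rbx) means it is an instance,
  // reaching null means it is not.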
8971 Label loop, is_instance, is_not_instance;
8972 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
8973 __ bind(&loop);
8974 __ cmpq(rcx, rbx);
8975 __ j(equal, &is_instance);
8976 __ cmpq(rcx, kScratchRegister);
Kristian Monsen25f61362010-05-21 11:50:48 +01008977 // The code at is_not_instance assumes that kScratchRegister contains a
8978 // non-zero GCable value (the null object in this case).
Steve Blocka7e24c12009-10-30 11:49:00 +00008979 __ j(equal, &is_not_instance);
8980 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
8981 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
8982 __ jmp(&loop);
8983
8984 __ bind(&is_instance);
Steve Blockd0582a62009-12-15 09:54:21 +00008985 __ xorl(rax, rax);
Kristian Monsen25f61362010-05-21 11:50:48 +01008986 // Store bitwise zero in the cache. This is a Smi in GC terms.
8987 ASSERT_EQ(0, kSmiTag);
8988 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00008989 __ ret(2 * kPointerSize);
8990
8991 __ bind(&is_not_instance);
Kristian Monsen25f61362010-05-21 11:50:48 +01008992 // We have to store a non-zero value in the cache.
8993 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00008994 __ ret(2 * kPointerSize);
8995
8996 // Slow-case: Go through the JavaScript implementation.
8997 __ bind(&slow);
8998 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
8999}
9000
9001
9002void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
Andrei Popescu402d9372010-02-26 13:31:12 +00009003 // rsp[0] : return address
9004 // rsp[8] : number of parameters
9005 // rsp[16] : receiver displacement
9006 // rsp[24] : function
9007
Steve Blocka7e24c12009-10-30 11:49:00 +00009008 // The displacement is used for skipping the return address and the
9009 // frame pointer on the stack. It is the offset of the last
9010 // parameter (if any) relative to the frame pointer.
9011 static const int kDisplacement = 2 * kPointerSize;
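  // I.e. a standard frame stores the saved frame pointer at offset 0 and the
  // return address at offset kPointerSize from the frame pointer, so the last
  // parameter (if any) starts 2 * kPointerSize above it.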
9012
9013 // Check if the calling frame is an arguments adaptor frame.
Andrei Popescu402d9372010-02-26 13:31:12 +00009014 Label adaptor_frame, try_allocate, runtime;
Steve Blocka7e24c12009-10-30 11:49:00 +00009015 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00009016 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
9017 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Andrei Popescu402d9372010-02-26 13:31:12 +00009018 __ j(equal, &adaptor_frame);
9019
9020 // Get the length from the frame.
9021 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
9022 __ jmp(&try_allocate);
Steve Blocka7e24c12009-10-30 11:49:00 +00009023
9024 // Patch the arguments.length and the parameters pointer.
Andrei Popescu402d9372010-02-26 13:31:12 +00009025 __ bind(&adaptor_frame);
Steve Blocka7e24c12009-10-30 11:49:00 +00009026 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
9027 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
Andrei Popescu402d9372010-02-26 13:31:12 +00009028 // Do not clobber the length index for the indexing operation since
9029 // it is used to compute the size for allocation later.
9030 SmiIndex index = masm->SmiToIndex(rbx, rcx, kPointerSizeLog2);
Steve Blocka7e24c12009-10-30 11:49:00 +00009031 __ lea(rdx, Operand(rdx, index.reg, index.scale, kDisplacement));
9032 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
9033
Andrei Popescu402d9372010-02-26 13:31:12 +00009034 // Try the new space allocation. Start out with computing the size of
9035 // the arguments object and the elements array.
9036 Label add_arguments_object;
9037 __ bind(&try_allocate);
9038 __ testq(rcx, rcx);
9039 __ j(zero, &add_arguments_object);
9040 index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
9041 __ lea(rcx, Operand(index.reg, index.scale, FixedArray::kHeaderSize));
9042 __ bind(&add_arguments_object);
9043 __ addq(rcx, Immediate(Heap::kArgumentsObjectSize));
9044
9045 // Do the allocation of both objects in one go.
9046 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
9047
9048 // Get the arguments boilerplate from the current (global) context.
9049 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
9050 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
9051 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
9052 __ movq(rdi, Operand(rdi, offset));
9053
9054 // Copy the JS object part.
9055 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
9056 __ movq(kScratchRegister, FieldOperand(rdi, i));
9057 __ movq(FieldOperand(rax, i), kScratchRegister);
9058 }
9059
9060 // Setup the callee in-object property.
9061 ASSERT(Heap::arguments_callee_index == 0);
9062 __ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
9063 __ movq(FieldOperand(rax, JSObject::kHeaderSize), kScratchRegister);
9064
9065 // Get the length (smi tagged) and set that as an in-object property too.
9066 ASSERT(Heap::arguments_length_index == 1);
9067 __ movq(rcx, Operand(rsp, 1 * kPointerSize));
9068 __ movq(FieldOperand(rax, JSObject::kHeaderSize + kPointerSize), rcx);
9069
9070 // If there are no actual arguments, we're done.
9071 Label done;
9072 __ testq(rcx, rcx);
9073 __ j(zero, &done);
9074
9075 // Get the parameters pointer from the stack and untag the length.
9076 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
9077 __ SmiToInteger32(rcx, rcx);
9078
9079 // Setup the elements pointer in the allocated arguments object and
9080 // initialize the header in the elements fixed array.
9081 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
9082 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
9083 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
9084 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
Kristian Monsen25f61362010-05-21 11:50:48 +01009085 __ movl(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
Andrei Popescu402d9372010-02-26 13:31:12 +00009086
9087 // Copy the fixed array slots.
9088 Label loop;
9089 __ bind(&loop);
9090 __ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
9091 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
9092 __ addq(rdi, Immediate(kPointerSize));
9093 __ subq(rdx, Immediate(kPointerSize));
9094 __ decq(rcx);
9095 __ j(not_zero, &loop);
9096
9097 // Return and remove the on-stack parameters.
9098 __ bind(&done);
9099 __ ret(3 * kPointerSize);
9100
Steve Blocka7e24c12009-10-30 11:49:00 +00009101 // Do the runtime call to allocate the arguments object.
9102 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +01009103 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009104}
9105
9106
9107void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
9108 // The key is in rdx and the parameter count is in rax.
9109
9110 // The displacement is used for skipping the frame pointer on the
9111 // stack. It is the offset of the last parameter (if any) relative
9112 // to the frame pointer.
9113 static const int kDisplacement = 1 * kPointerSize;
9114
9115 // Check that the key is a smi.
9116 Label slow;
9117 __ JumpIfNotSmi(rdx, &slow);
9118
9119 // Check if the calling frame is an arguments adaptor frame.
9120 Label adaptor;
9121 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00009122 __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
9123 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
Steve Blocka7e24c12009-10-30 11:49:00 +00009124 __ j(equal, &adaptor);
9125
9126 // Check index against formal parameters count limit passed in
9127 // through register rax. Use unsigned comparison to get negative
9128 // check for free.
9129 __ cmpq(rdx, rax);
9130 __ j(above_equal, &slow);
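  // E.g. a negative smi key has its sign bit set, so as an unsigned value it
  // is above any parameter count and takes the above_equal branch to slow.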
9131
9132 // Read the argument from the stack and return it.
9133 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
9134 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0));
9135 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
9136 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
9137 __ Ret();
9138
9139 // Arguments adaptor case: Check index against actual arguments
9140 // limit found in the arguments adaptor frame. Use unsigned
9141 // comparison to get negative check for free.
9142 __ bind(&adaptor);
9143 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
9144 __ cmpq(rdx, rcx);
9145 __ j(above_equal, &slow);
9146
9147 // Read the argument from the stack and return it.
9148 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2);
9149 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0));
9150 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2);
9151 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement));
9152 __ Ret();
9153
9154 // Slow-case: Handle non-smi or out-of-bounds access to arguments
9155 // by calling the runtime system.
9156 __ bind(&slow);
9157 __ pop(rbx); // Return address.
9158 __ push(rdx);
9159 __ push(rbx);
Steve Block6ded16b2010-05-10 14:33:55 +01009160 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009161}
9162
9163
Steve Blocka7e24c12009-10-30 11:49:00 +00009164void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
9165 // Check that stack should contain next handler, frame pointer, state and
9166 // return address in that order.
9167 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize,
9168 StackHandlerConstants::kStateOffset);
9169 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize,
9170 StackHandlerConstants::kPCOffset);
9171
9172 ExternalReference handler_address(Top::k_handler_address);
9173 __ movq(kScratchRegister, handler_address);
9174 __ movq(rsp, Operand(kScratchRegister, 0));
9175 // get next in chain
9176 __ pop(rcx);
9177 __ movq(Operand(kScratchRegister, 0), rcx);
9178 __ pop(rbp); // pop frame pointer
9179 __ pop(rdx); // remove state
9180
9181 // Before returning we restore the context from the frame pointer if not NULL.
9182 // The frame pointer is NULL in the exception handler of a JS entry frame.
9183 __ xor_(rsi, rsi); // tentatively set context pointer to NULL
9184 Label skip;
9185 __ cmpq(rbp, Immediate(0));
9186 __ j(equal, &skip);
9187 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
9188 __ bind(&skip);
9189 __ ret(0);
9190}
9191
9192
9193void CEntryStub::GenerateCore(MacroAssembler* masm,
9194 Label* throw_normal_exception,
9195 Label* throw_termination_exception,
9196 Label* throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009197 bool do_gc,
Steve Block6ded16b2010-05-10 14:33:55 +01009198 bool always_allocate_scope,
9199 int /* alignment_skew */) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009200 // rax: result parameter for PerformGC, if any.
9201 // rbx: pointer to C function (C callee-saved).
9202 // rbp: frame pointer (restored after C call).
9203 // rsp: stack pointer (restored after C call).
9204 // r14: number of arguments including receiver (C callee-saved).
9205 // r15: pointer to the first argument (C callee-saved).
9206 // This pointer is reused in LeaveExitFrame(), so it is stored in a
9207 // callee-saved register.
9208
Leon Clarke4515c472010-02-03 11:58:03 +00009209 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
9210 // Complex results must be written to address passed as first argument.
9211 // AMD64 calling convention: a struct of two pointers in rax+rdx
9212
Steve Block6ded16b2010-05-10 14:33:55 +01009213 // Check stack alignment.
9214 if (FLAG_debug_code) {
9215 __ CheckStackAlignment();
9216 }
9217
Steve Blocka7e24c12009-10-30 11:49:00 +00009218 if (do_gc) {
Steve Block6ded16b2010-05-10 14:33:55 +01009219 // Pass failure code returned from last attempt as first argument to
9220 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
9221 // stack is known to be aligned. This function takes one argument which is
9222 // passed in register.
Steve Blocka7e24c12009-10-30 11:49:00 +00009223#ifdef _WIN64
9224 __ movq(rcx, rax);
Steve Block6ded16b2010-05-10 14:33:55 +01009225#else // _WIN64
Steve Blocka7e24c12009-10-30 11:49:00 +00009226 __ movq(rdi, rax);
9227#endif
9228 __ movq(kScratchRegister,
9229 FUNCTION_ADDR(Runtime::PerformGC),
9230 RelocInfo::RUNTIME_ENTRY);
9231 __ call(kScratchRegister);
9232 }
9233
9234 ExternalReference scope_depth =
9235 ExternalReference::heap_always_allocate_scope_depth();
9236 if (always_allocate_scope) {
9237 __ movq(kScratchRegister, scope_depth);
9238 __ incl(Operand(kScratchRegister, 0));
9239 }
9240
9241 // Call C function.
9242#ifdef _WIN64
9243 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9
9244 // Store Arguments object on stack, below the 4 WIN64 ABI parameter slots.
9245 __ movq(Operand(rsp, 4 * kPointerSize), r14); // argc.
9246 __ movq(Operand(rsp, 5 * kPointerSize), r15); // argv.
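  // The first four stack slots form the Win64 shadow (home) space reserved
  // for the register parameters, which is why argc and argv are stored
  // starting at slot 4.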
9247 if (result_size_ < 2) {
9248 // Pass a pointer to the Arguments object as the first argument.
9249 // Return result in single register (rax).
9250 __ lea(rcx, Operand(rsp, 4 * kPointerSize));
9251 } else {
9252 ASSERT_EQ(2, result_size_);
9253 // Pass a pointer to the result location as the first argument.
9254 __ lea(rcx, Operand(rsp, 6 * kPointerSize));
9255 // Pass a pointer to the Arguments object as the second argument.
9256 __ lea(rdx, Operand(rsp, 4 * kPointerSize));
9257 }
9258
Steve Block6ded16b2010-05-10 14:33:55 +01009259#else // _WIN64
Steve Blocka7e24c12009-10-30 11:49:00 +00009260 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
9261 __ movq(rdi, r14); // argc.
9262 __ movq(rsi, r15); // argv.
9263#endif
9264 __ call(rbx);
9265 // Result is in rax - do not destroy this register!
9266
9267 if (always_allocate_scope) {
9268 __ movq(kScratchRegister, scope_depth);
9269 __ decl(Operand(kScratchRegister, 0));
9270 }
9271
9272 // Check for failure result.
9273 Label failure_returned;
9274 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
Steve Block3ce2e202009-11-05 08:53:23 +00009275#ifdef _WIN64
9276 // If return value is on the stack, pop it to registers.
9277 if (result_size_ > 1) {
9278 ASSERT_EQ(2, result_size_);
Steve Blockd0582a62009-12-15 09:54:21 +00009279 // Read result values stored on stack. Result is stored
9280 // above the four argument mirror slots and the two
9281 // Arguments object slots.
Steve Block3ce2e202009-11-05 08:53:23 +00009282 __ movq(rax, Operand(rsp, 6 * kPointerSize));
9283 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
9284 }
9285#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00009286 __ lea(rcx, Operand(rax, 1));
9287 // Lower 2 bits of rcx are 0 iff rax has failure tag.
9288 __ testl(rcx, Immediate(kFailureTagMask));
9289 __ j(zero, &failure_returned);
9290
9291 // Exit the JavaScript to C++ exit frame.
Leon Clarke4515c472010-02-03 11:58:03 +00009292 __ LeaveExitFrame(mode_, result_size_);
Steve Blocka7e24c12009-10-30 11:49:00 +00009293 __ ret(0);
9294
9295 // Handling of failure.
9296 __ bind(&failure_returned);
9297
9298 Label retry;
9299 // If the returned exception is RETRY_AFTER_GC continue at retry label
9300 ASSERT(Failure::RETRY_AFTER_GC == 0);
9301 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
9302 __ j(zero, &retry);
9303
9304 // Special handling of out of memory exceptions.
9305 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE);
9306 __ cmpq(rax, kScratchRegister);
9307 __ j(equal, throw_out_of_memory_exception);
9308
9309 // Retrieve the pending exception and clear the variable.
9310 ExternalReference pending_exception_address(Top::k_pending_exception_address);
9311 __ movq(kScratchRegister, pending_exception_address);
9312 __ movq(rax, Operand(kScratchRegister, 0));
9313 __ movq(rdx, ExternalReference::the_hole_value_location());
9314 __ movq(rdx, Operand(rdx, 0));
9315 __ movq(Operand(kScratchRegister, 0), rdx);
9316
9317 // Special handling of termination exceptions which are uncatchable
9318 // by javascript code.
9319 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
9320 __ j(equal, throw_termination_exception);
9321
9322 // Handle normal exception.
9323 __ jmp(throw_normal_exception);
9324
9325 // Retry.
9326 __ bind(&retry);
9327}
9328
9329
9330void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
9331 UncatchableExceptionType type) {
9332 // Fetch top stack handler.
9333 ExternalReference handler_address(Top::k_handler_address);
9334 __ movq(kScratchRegister, handler_address);
9335 __ movq(rsp, Operand(kScratchRegister, 0));
9336
9337 // Unwind the handlers until the ENTRY handler is found.
9338 Label loop, done;
9339 __ bind(&loop);
9340 // Load the type of the current stack handler.
9341 const int kStateOffset = StackHandlerConstants::kStateOffset;
9342 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
9343 __ j(equal, &done);
9344 // Fetch the next handler in the list.
9345 const int kNextOffset = StackHandlerConstants::kNextOffset;
9346 __ movq(rsp, Operand(rsp, kNextOffset));
9347 __ jmp(&loop);
9348 __ bind(&done);
9349
9350 // Set the top handler address to next handler past the current ENTRY handler.
9351 __ movq(kScratchRegister, handler_address);
9352 __ pop(Operand(kScratchRegister, 0));
9353
9354 if (type == OUT_OF_MEMORY) {
9355 // Set external caught exception to false.
9356 ExternalReference external_caught(Top::k_external_caught_exception_address);
9357 __ movq(rax, Immediate(false));
9358 __ store_rax(external_caught);
9359
9360 // Set pending exception and rax to out of memory exception.
9361 ExternalReference pending_exception(Top::k_pending_exception_address);
9362 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
9363 __ store_rax(pending_exception);
9364 }
9365
9366 // Clear the context pointer.
9367 __ xor_(rsi, rsi);
9368
9369 // Restore registers from handler.
9370 ASSERT_EQ(StackHandlerConstants::kNextOffset + kPointerSize,
9371 StackHandlerConstants::kFPOffset);
9372 __ pop(rbp); // FP
9373 ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize,
9374 StackHandlerConstants::kStateOffset);
9375 __ pop(rdx); // State
9376
9377 ASSERT_EQ(StackHandlerConstants::kStateOffset + kPointerSize,
9378 StackHandlerConstants::kPCOffset);
9379 __ ret(0);
9380}
9381
9382
9383void CallFunctionStub::Generate(MacroAssembler* masm) {
9384 Label slow;
9385
Leon Clarkee46be812010-01-19 14:06:41 +00009386 // If the receiver might be a value (string, number or boolean) check for this
9387 // and box it if it is.
9388 if (ReceiverMightBeValue()) {
9389 // Get the receiver from the stack.
9390 // +1 ~ return address
9391 Label receiver_is_value, receiver_is_js_object;
9392 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));
9393
9394 // Check if receiver is a smi (which is a number value).
9395 __ JumpIfSmi(rax, &receiver_is_value);
9396
9397 // Check if the receiver is a valid JS object.
9398 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
9399 __ j(above_equal, &receiver_is_js_object);
9400
9401 // Call the runtime to box the value.
9402 __ bind(&receiver_is_value);
9403 __ EnterInternalFrame();
9404 __ push(rax);
9405 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
9406 __ LeaveInternalFrame();
9407 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);
9408
9409 __ bind(&receiver_is_js_object);
9410 }
9411
Steve Blocka7e24c12009-10-30 11:49:00 +00009412 // Get the function to call from the stack.
9413 // +2 ~ receiver, return address
9414 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
9415
9416 // Check that the function really is a JavaScript function.
9417 __ JumpIfSmi(rdi, &slow);
9418 // Goto slow case if we do not have a function.
9419 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
9420 __ j(not_equal, &slow);
9421
9422 // Fast-case: Just invoke the function.
9423 ParameterCount actual(argc_);
9424 __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
9425
9426 // Slow-case: Non-function called.
9427 __ bind(&slow);
Andrei Popescu402d9372010-02-26 13:31:12 +00009428 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
9429 // of the original receiver from the call site).
9430 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi);
Steve Blocka7e24c12009-10-30 11:49:00 +00009431 __ Set(rax, argc_);
9432 __ Set(rbx, 0);
9433 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
9434 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
9435 __ Jump(adaptor, RelocInfo::CODE_TARGET);
9436}
9437
9438
Leon Clarke4515c472010-02-03 11:58:03 +00009439void CEntryStub::Generate(MacroAssembler* masm) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009440 // rax: number of arguments including receiver
9441 // rbx: pointer to C function (C callee-saved)
9442 // rbp: frame pointer of calling JS frame (restored after C call)
9443 // rsp: stack pointer (restored after C call)
9444 // rsi: current context (restored)
9445
9446 // NOTE: Invocations of builtins may return failure objects
9447 // instead of a proper result. The builtin entry handles
9448 // this by performing a garbage collection and retrying the
9449 // builtin once.
9450
Steve Blocka7e24c12009-10-30 11:49:00 +00009451 // Enter the exit frame that transitions from JavaScript to C++.
Leon Clarke4515c472010-02-03 11:58:03 +00009452 __ EnterExitFrame(mode_, result_size_);
Steve Blocka7e24c12009-10-30 11:49:00 +00009453
9454 // rax: Holds the context at this point, but should not be used.
9455 // On entry to code generated by GenerateCore, it must hold
9456 // a failure result if the collect_garbage argument to GenerateCore
9457 // is true. This failure result can be the result of code
9458 // generated by a previous call to GenerateCore. The value
9459 // of rax is then passed to Runtime::PerformGC.
9460 // rbx: pointer to builtin function (C callee-saved).
9461 // rbp: frame pointer of exit frame (restored after C call).
9462 // rsp: stack pointer (restored after C call).
9463 // r14: number of arguments including receiver (C callee-saved).
9464 // r15: argv pointer (C callee-saved).
9465
9466 Label throw_normal_exception;
9467 Label throw_termination_exception;
9468 Label throw_out_of_memory_exception;
9469
9470 // Call into the runtime system.
9471 GenerateCore(masm,
9472 &throw_normal_exception,
9473 &throw_termination_exception,
9474 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009475 false,
9476 false);
9477
9478 // Do space-specific GC and retry runtime call.
9479 GenerateCore(masm,
9480 &throw_normal_exception,
9481 &throw_termination_exception,
9482 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009483 true,
9484 false);
9485
9486 // Do full GC and retry runtime call one final time.
9487 Failure* failure = Failure::InternalError();
9488 __ movq(rax, failure, RelocInfo::NONE);
9489 GenerateCore(masm,
9490 &throw_normal_exception,
9491 &throw_termination_exception,
9492 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009493 true,
9494 true);
9495
9496 __ bind(&throw_out_of_memory_exception);
9497 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
9498
9499 __ bind(&throw_termination_exception);
9500 GenerateThrowUncatchable(masm, TERMINATION);
9501
9502 __ bind(&throw_normal_exception);
9503 GenerateThrowTOS(masm);
9504}
9505
9506
Steve Blockd0582a62009-12-15 09:54:21 +00009507void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
9508 UNREACHABLE();
9509}
9510
9511
Steve Blocka7e24c12009-10-30 11:49:00 +00009512void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
9513 Label invoke, exit;
9514#ifdef ENABLE_LOGGING_AND_PROFILING
9515 Label not_outermost_js, not_outermost_js_2;
9516#endif
9517
9518 // Setup frame.
9519 __ push(rbp);
9520 __ movq(rbp, rsp);
9521
9522 // Push the stack frame type marker twice.
9523 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
Steve Block3ce2e202009-11-05 08:53:23 +00009524 __ Push(Smi::FromInt(marker)); // context slot
9525 __ Push(Smi::FromInt(marker)); // function slot
Steve Blocka7e24c12009-10-30 11:49:00 +00009526 // Save callee-saved registers (X64 calling conventions).
9527 __ push(r12);
9528 __ push(r13);
9529 __ push(r14);
9530 __ push(r15);
9531 __ push(rdi);
9532 __ push(rsi);
9533 __ push(rbx);
9534 // TODO(X64): Push XMM6-XMM15 (low 64 bits) as well, or make them
9535 // callee-save in JS code as well.
9536
9537 // Save copies of the top frame descriptor on the stack.
9538 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
9539 __ load_rax(c_entry_fp);
9540 __ push(rax);
9541
9542#ifdef ENABLE_LOGGING_AND_PROFILING
9543 // If this is the outermost JS call, set js_entry_sp value.
9544 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
9545 __ load_rax(js_entry_sp);
9546 __ testq(rax, rax);
9547 __ j(not_zero, &not_outermost_js);
9548 __ movq(rax, rbp);
9549 __ store_rax(js_entry_sp);
9550 __ bind(&not_outermost_js);
9551#endif
9552
9553 // Call a faked try-block that does the invoke.
9554 __ call(&invoke);
9555
9556 // Caught exception: Store result (exception) in the pending
9557 // exception field in the JSEnv and return a failure sentinel.
9558 ExternalReference pending_exception(Top::k_pending_exception_address);
9559 __ store_rax(pending_exception);
9560 __ movq(rax, Failure::Exception(), RelocInfo::NONE);
9561 __ jmp(&exit);
9562
9563 // Invoke: Link this frame into the handler chain.
9564 __ bind(&invoke);
9565 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
9566
9567 // Clear any pending exceptions.
9568 __ load_rax(ExternalReference::the_hole_value_location());
9569 __ store_rax(pending_exception);
9570
9571 // Fake a receiver (NULL).
9572 __ push(Immediate(0)); // receiver
9573
9574 // Invoke the function by calling through JS entry trampoline
9575 // builtin and pop the faked function when we return. We load the address
9576 // from an external reference instead of inlining the call target address
9577 // directly in the code, because the builtin stubs may not have been
9578 // generated yet at the time this code is generated.
9579 if (is_construct) {
9580 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
9581 __ load_rax(construct_entry);
9582 } else {
9583 ExternalReference entry(Builtins::JSEntryTrampoline);
9584 __ load_rax(entry);
9585 }
9586 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
9587 __ call(kScratchRegister);
9588
9589 // Unlink this frame from the handler chain.
9590 __ movq(kScratchRegister, ExternalReference(Top::k_handler_address));
9591 __ pop(Operand(kScratchRegister, 0));
9592 // Pop next_sp.
9593 __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
9594
9595#ifdef ENABLE_LOGGING_AND_PROFILING
9596 // If the current rbp value is the same as the js_entry_sp value, it means that
9597 // the current function is the outermost.
9598 __ movq(kScratchRegister, js_entry_sp);
9599 __ cmpq(rbp, Operand(kScratchRegister, 0));
9600 __ j(not_equal, &not_outermost_js_2);
9601 __ movq(Operand(kScratchRegister, 0), Immediate(0));
9602 __ bind(&not_outermost_js_2);
9603#endif
9604
9605 // Restore the top frame descriptor from the stack.
9606 __ bind(&exit);
9607 __ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
9608 __ pop(Operand(kScratchRegister, 0));
9609
9610 // Restore callee-saved registers (X64 conventions).
9611 __ pop(rbx);
9612 __ pop(rsi);
9613 __ pop(rdi);
9614 __ pop(r15);
9615 __ pop(r14);
9616 __ pop(r13);
9617 __ pop(r12);
9618 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
9619
9620 // Restore frame pointer and return.
9621 __ pop(rbp);
9622 __ ret(0);
9623}
9624
9625
9626// -----------------------------------------------------------------------------
9627// Implementation of stubs.
9628
9629// Stub classes have public member named masm, not masm_.
9630
9631void StackCheckStub::Generate(MacroAssembler* masm) {
9632 // Because builtins always remove the receiver from the stack, we
9633 // have to fake one to avoid underflowing the stack. The receiver
9634 // must be inserted below the return address on the stack so we
9635 // temporarily store that in a register.
9636 __ pop(rax);
Steve Block3ce2e202009-11-05 08:53:23 +00009637 __ Push(Smi::FromInt(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00009638 __ push(rax);
9639
9640 // Do tail-call to runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +01009641 __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009642}
9643
9644
Steve Blocka7e24c12009-10-30 11:49:00 +00009645void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
9646 Register number) {
9647 Label load_smi, done;
9648
9649 __ JumpIfSmi(number, &load_smi);
9650 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
9651 __ jmp(&done);
9652
9653 __ bind(&load_smi);
9654 __ SmiToInteger32(number, number);
9655 __ push(number);
9656 __ fild_s(Operand(rsp, 0));
9657 __ pop(number);
9658
9659 __ bind(&done);
9660}
9661
9662
9663void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
9664 Register src,
9665 XMMRegister dst) {
9666 Label load_smi, done;
9667
9668 __ JumpIfSmi(src, &load_smi);
9669 __ movsd(dst, FieldOperand(src, HeapNumber::kValueOffset));
9670 __ jmp(&done);
9671
9672 __ bind(&load_smi);
9673 __ SmiToInteger32(src, src);
9674 __ cvtlsi2sd(dst, src);
9675
9676 __ bind(&done);
9677}
9678
9679
Steve Block6ded16b2010-05-10 14:33:55 +01009680void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
9681 Register src,
9682 XMMRegister dst,
9683 Label* not_number) {
9684 Label load_smi, done;
9685 ASSERT(!src.is(kScratchRegister));
9686 __ JumpIfSmi(src, &load_smi);
9687 __ LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
9688 __ cmpq(FieldOperand(src, HeapObject::kMapOffset), kScratchRegister);
9689 __ j(not_equal, not_number);
9690 __ movsd(dst, FieldOperand(src, HeapNumber::kValueOffset));
9691 __ jmp(&done);
9692
9693 __ bind(&load_smi);
9694 __ SmiToInteger32(kScratchRegister, src);
9695 __ cvtlsi2sd(dst, kScratchRegister);
9696
9697 __ bind(&done);
9698}
9699
9700
Steve Blocka7e24c12009-10-30 11:49:00 +00009701void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
9702 XMMRegister dst1,
9703 XMMRegister dst2) {
Leon Clarke4515c472010-02-03 11:58:03 +00009704 __ movq(kScratchRegister, rdx);
Steve Blocka7e24c12009-10-30 11:49:00 +00009705 LoadFloatOperand(masm, kScratchRegister, dst1);
Leon Clarke4515c472010-02-03 11:58:03 +00009706 __ movq(kScratchRegister, rax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009707 LoadFloatOperand(masm, kScratchRegister, dst2);
9708}
9709
9710
Leon Clarke4515c472010-02-03 11:58:03 +00009711void FloatingPointHelper::LoadFloatOperandsFromSmis(MacroAssembler* masm,
9712 XMMRegister dst1,
9713 XMMRegister dst2) {
9714 __ SmiToInteger32(kScratchRegister, rdx);
9715 __ cvtlsi2sd(dst1, kScratchRegister);
9716 __ SmiToInteger32(kScratchRegister, rax);
9717 __ cvtlsi2sd(dst2, kScratchRegister);
Steve Blocka7e24c12009-10-30 11:49:00 +00009718}
9719
9720
Leon Clarked91b9f72010-01-27 17:25:45 +00009721// Input: rdx, rax are the left and right objects of a bit op.
9722// Output: rax, rcx are left and right integers for a bit op.
9723void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
Leon Clarked91b9f72010-01-27 17:25:45 +00009724 Label* conversion_failure) {
9725 // Check float operands.
9726 Label arg1_is_object, check_undefined_arg1;
9727 Label arg2_is_object, check_undefined_arg2;
9728 Label load_arg2, done;
9729
9730 __ JumpIfNotSmi(rdx, &arg1_is_object);
9731 __ SmiToInteger32(rdx, rdx);
9732 __ jmp(&load_arg2);
9733
9734 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
9735 __ bind(&check_undefined_arg1);
9736 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
9737 __ j(not_equal, conversion_failure);
9738 __ movl(rdx, Immediate(0));
9739 __ jmp(&load_arg2);
9740
9741 __ bind(&arg1_is_object);
9742 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
9743 __ CompareRoot(rbx, Heap::kHeapNumberMapRootIndex);
9744 __ j(not_equal, &check_undefined_arg1);
9745 // Get the untagged integer version of the edx heap number in rcx.
Kristian Monsen25f61362010-05-21 11:50:48 +01009746 IntegerConvert(masm, rdx, rdx);
Leon Clarked91b9f72010-01-27 17:25:45 +00009747
Kristian Monsen25f61362010-05-21 11:50:48 +01009748 // Here rdx has the untagged integer, rax has a Smi or a heap number.
Leon Clarked91b9f72010-01-27 17:25:45 +00009749 __ bind(&load_arg2);
9750 // Test if arg2 is a Smi.
9751 __ JumpIfNotSmi(rax, &arg2_is_object);
9752 __ SmiToInteger32(rax, rax);
9753 __ movl(rcx, rax);
9754 __ jmp(&done);
9755
9756 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
9757 __ bind(&check_undefined_arg2);
9758 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
9759 __ j(not_equal, conversion_failure);
9760 __ movl(rcx, Immediate(0));
9761 __ jmp(&done);
9762
9763 __ bind(&arg2_is_object);
9764 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
9765 __ CompareRoot(rbx, Heap::kHeapNumberMapRootIndex);
9766 __ j(not_equal, &check_undefined_arg2);
9767 // Get the untagged integer version of the eax heap number in ecx.
Kristian Monsen25f61362010-05-21 11:50:48 +01009768 IntegerConvert(masm, rcx, rax);
Leon Clarked91b9f72010-01-27 17:25:45 +00009769 __ bind(&done);
9770 __ movl(rax, rdx);
9771}
9772
9773
Steve Blocka7e24c12009-10-30 11:49:00 +00009774void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
9775 Register lhs,
9776 Register rhs) {
9777 Label load_smi_lhs, load_smi_rhs, done_load_lhs, done;
9778 __ JumpIfSmi(lhs, &load_smi_lhs);
9779 __ fld_d(FieldOperand(lhs, HeapNumber::kValueOffset));
9780 __ bind(&done_load_lhs);
9781
9782 __ JumpIfSmi(rhs, &load_smi_rhs);
9783 __ fld_d(FieldOperand(rhs, HeapNumber::kValueOffset));
9784 __ jmp(&done);
9785
9786 __ bind(&load_smi_lhs);
9787 __ SmiToInteger64(kScratchRegister, lhs);
9788 __ push(kScratchRegister);
9789 __ fild_d(Operand(rsp, 0));
9790 __ pop(kScratchRegister);
9791 __ jmp(&done_load_lhs);
9792
9793 __ bind(&load_smi_rhs);
9794 __ SmiToInteger64(kScratchRegister, rhs);
9795 __ push(kScratchRegister);
9796 __ fild_d(Operand(rsp, 0));
9797 __ pop(kScratchRegister);
9798
9799 __ bind(&done);
9800}
9801
9802
Steve Block3ce2e202009-11-05 08:53:23 +00009803void FloatingPointHelper::CheckNumberOperands(MacroAssembler* masm,
9804 Label* non_float) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009805 Label test_other, done;
9806 // Test if both operands are numbers (heap_numbers or smis).
9807 // If not, jump to label non_float.
9808 __ JumpIfSmi(rdx, &test_other); // argument in rdx is OK
9809 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), Factory::heap_number_map());
9810 __ j(not_equal, non_float); // The argument in rdx is not a number.
9811
9812 __ bind(&test_other);
9813 __ JumpIfSmi(rax, &done); // argument in rax is OK
9814 __ Cmp(FieldOperand(rax, HeapObject::kMapOffset), Factory::heap_number_map());
9815 __ j(not_equal, non_float); // The argument in rax is not a number.
9816
9817 // Fall-through: Both operands are numbers.
9818 __ bind(&done);
9819}
9820
9821
9822const char* GenericBinaryOpStub::GetName() {
Leon Clarkee46be812010-01-19 14:06:41 +00009823 if (name_ != NULL) return name_;
9824 const int len = 100;
9825 name_ = Bootstrapper::AllocateAutoDeletedArray(len);
9826 if (name_ == NULL) return "OOM";
9827 const char* op_name = Token::Name(op_);
9828 const char* overwrite_name;
9829 switch (mode_) {
9830 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
9831 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
9832 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
9833 default: overwrite_name = "UnknownOverwrite"; break;
Steve Blocka7e24c12009-10-30 11:49:00 +00009834 }
Leon Clarkee46be812010-01-19 14:06:41 +00009835
9836 OS::SNPrintF(Vector<char>(name_, len),
Kristian Monsen25f61362010-05-21 11:50:48 +01009837 "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
Leon Clarkee46be812010-01-19 14:06:41 +00009838 op_name,
9839 overwrite_name,
9840 (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
9841 args_in_registers_ ? "RegArgs" : "StackArgs",
9842 args_reversed_ ? "_R" : "",
Steve Block6ded16b2010-05-10 14:33:55 +01009843 static_operands_type_.ToString(),
9844 BinaryOpIC::GetName(runtime_operands_type_));
Leon Clarkee46be812010-01-19 14:06:41 +00009845 return name_;
Steve Blocka7e24c12009-10-30 11:49:00 +00009846}
9847
9848
Steve Blockd0582a62009-12-15 09:54:21 +00009849void GenericBinaryOpStub::GenerateCall(
9850 MacroAssembler* masm,
9851 Register left,
9852 Register right) {
9853 if (!ArgsInRegistersSupported()) {
9854 // Pass arguments on the stack.
9855 __ push(left);
9856 __ push(right);
9857 } else {
9858 // The calling convention with registers is left in rdx and right in rax.
9859 Register left_arg = rdx;
9860 Register right_arg = rax;
9861 if (!(left.is(left_arg) && right.is(right_arg))) {
9862 if (left.is(right_arg) && right.is(left_arg)) {
9863 if (IsOperationCommutative()) {
9864 SetArgsReversed();
9865 } else {
9866 __ xchg(left, right);
9867 }
9868 } else if (left.is(left_arg)) {
9869 __ movq(right_arg, right);
Andrei Popescu402d9372010-02-26 13:31:12 +00009870 } else if (right.is(right_arg)) {
9871 __ movq(left_arg, left);
Steve Blockd0582a62009-12-15 09:54:21 +00009872 } else if (left.is(right_arg)) {
9873 if (IsOperationCommutative()) {
9874 __ movq(left_arg, right);
9875 SetArgsReversed();
9876 } else {
9877 // Order of moves important to avoid destroying left argument.
9878 __ movq(left_arg, left);
9879 __ movq(right_arg, right);
9880 }
9881 } else if (right.is(left_arg)) {
9882 if (IsOperationCommutative()) {
9883 __ movq(right_arg, left);
9884 SetArgsReversed();
9885 } else {
9886 // Order of moves important to avoid destroying right argument.
9887 __ movq(right_arg, right);
9888 __ movq(left_arg, left);
9889 }
Steve Blockd0582a62009-12-15 09:54:21 +00009890 } else {
9891 // Order of moves is not important.
9892 __ movq(left_arg, left);
9893 __ movq(right_arg, right);
9894 }
9895 }
9896
9897 // Update flags to indicate that arguments are in registers.
9898 SetArgsInRegisters();
9899 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
9900 }
9901
9902 // Call the stub.
9903 __ CallStub(this);
9904}
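
// The register shuffle above is a small case analysis: get an arbitrary
// (left, right) pair into the fixed rdx/rax convention without clobbering a
// value that is still needed. The sketch below is illustrative only (plain
// integer register codes, names that are not part of V8) and collapses the
// "move both" sub-cases into a single bucket.
enum SketchShuffleAction {
  SKETCH_ALREADY_IN_PLACE,  // Nothing to move.
  SKETCH_MARK_REVERSED,     // Commutative op, operands exactly swapped.
  SKETCH_EXCHANGE,          // Non-commutative op, operands exactly swapped.
  SKETCH_MOVE_RIGHT_ONLY,   // left is already in the left-argument register.
  SKETCH_MOVE_LEFT_ONLY,    // right is already in the right-argument register.
  SKETCH_MOVE_BOTH          // Emit both moves, ordered so nothing live is lost.
};

static inline SketchShuffleAction SketchClassifyShuffle(int left, int right,
                                                        int left_arg,
                                                        int right_arg,
                                                        bool commutative) {
  if (left == left_arg && right == right_arg) return SKETCH_ALREADY_IN_PLACE;
  if (left == right_arg && right == left_arg) {
    return commutative ? SKETCH_MARK_REVERSED : SKETCH_EXCHANGE;
  }
  if (left == left_arg) return SKETCH_MOVE_RIGHT_ONLY;
  if (right == right_arg) return SKETCH_MOVE_LEFT_ONLY;
  return SKETCH_MOVE_BOTH;
}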
9905
9906
9907void GenericBinaryOpStub::GenerateCall(
9908 MacroAssembler* masm,
9909 Register left,
9910 Smi* right) {
9911 if (!ArgsInRegistersSupported()) {
9912 // Pass arguments on the stack.
9913 __ push(left);
9914 __ Push(right);
9915 } else {
9916 // The calling convention with registers is left in rdx and right in rax.
9917 Register left_arg = rdx;
9918 Register right_arg = rax;
9919 if (left.is(left_arg)) {
9920 __ Move(right_arg, right);
9921 } else if (left.is(right_arg) && IsOperationCommutative()) {
9922 __ Move(left_arg, right);
9923 SetArgsReversed();
9924 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00009925 // For non-commutative operations, left and right_arg might be
9926 // the same register. Therefore, the order of the moves is
9927 // important here in order to not overwrite left before moving
9928 // it to left_arg.
Steve Blockd0582a62009-12-15 09:54:21 +00009929 __ movq(left_arg, left);
9930 __ Move(right_arg, right);
9931 }
9932
9933 // Update flags to indicate that arguments are in registers.
9934 SetArgsInRegisters();
9935 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
9936 }
9937
9938 // Call the stub.
9939 __ CallStub(this);
9940}
9941
9942
9943void GenericBinaryOpStub::GenerateCall(
9944 MacroAssembler* masm,
9945 Smi* left,
9946 Register right) {
9947 if (!ArgsInRegistersSupported()) {
9948 // Pass arguments on the stack.
9949 __ Push(left);
9950 __ push(right);
9951 } else {
9952 // The calling convention with registers is left in rdx and right in rax.
9953 Register left_arg = rdx;
9954 Register right_arg = rax;
9955 if (right.is(right_arg)) {
9956 __ Move(left_arg, left);
9957 } else if (right.is(left_arg) && IsOperationCommutative()) {
9958 __ Move(right_arg, left);
9959 SetArgsReversed();
9960 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00009961 // For non-commutative operations, right and left_arg might be
9962 // the same register. Therefore, the order of the moves is
9963 // important here in order to not overwrite right before moving
9964 // it to right_arg.
Steve Blockd0582a62009-12-15 09:54:21 +00009965 __ movq(right_arg, right);
Andrei Popescu402d9372010-02-26 13:31:12 +00009966 __ Move(left_arg, left);
Steve Blockd0582a62009-12-15 09:54:21 +00009967 }
9968 // Update flags to indicate that arguments are in registers.
9969 SetArgsInRegisters();
9970 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
9971 }
9972
9973 // Call the stub.
9974 __ CallStub(this);
9975}
9976
9977
Leon Clarke4515c472010-02-03 11:58:03 +00009978Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm,
9979 VirtualFrame* frame,
9980 Result* left,
9981 Result* right) {
9982 if (ArgsInRegistersSupported()) {
9983 SetArgsInRegisters();
9984 return frame->CallStub(this, left, right);
9985 } else {
9986 frame->Push(left);
9987 frame->Push(right);
9988 return frame->CallStub(this, 2);
9989 }
9990}
9991
9992
Steve Blocka7e24c12009-10-30 11:49:00 +00009993void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
Steve Block6ded16b2010-05-10 14:33:55 +01009994 // 1. Move arguments into rdx, rax except for DIV and MOD, which need the
9995 // dividend in rax and rdx free for the division. Use rax, rbx for those.
Leon Clarke4515c472010-02-03 11:58:03 +00009996 Comment load_comment(masm, "-- Load arguments");
9997 Register left = rdx;
9998 Register right = rax;
9999 if (op_ == Token::DIV || op_ == Token::MOD) {
10000 left = rax;
10001 right = rbx;
10002 if (HasArgsInRegisters()) {
10003 __ movq(rbx, rax);
10004 __ movq(rax, rdx);
10005 }
10006 }
10007 if (!HasArgsInRegisters()) {
10008 __ movq(right, Operand(rsp, 1 * kPointerSize));
10009 __ movq(left, Operand(rsp, 2 * kPointerSize));
10010 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010011
Leon Clarke4515c472010-02-03 11:58:03 +000010012 Label not_smis;
Kristian Monsen25f61362010-05-21 11:50:48 +010010013 // 2. Smi check both operands.
10014 if (static_operands_type_.IsSmi()) {
10015 // Skip smi check if we know that both arguments are smis.
10016 if (FLAG_debug_code) {
10017 __ AbortIfNotSmi(left, "Static type check claimed non-smi is smi.");
10018 __ AbortIfNotSmi(right, "Static type check claimed non-smi is smi.");
10019 }
10020 if (op_ == Token::BIT_OR) {
10021 // Handle OR here, since we do extra smi-checking in the or code below.
10022 __ SmiOr(right, right, left);
10023 GenerateReturn(masm);
10024 return;
10025 }
10026 } else {
10027 if (op_ != Token::BIT_OR) {
10028 // Skip the check for OR as it is better combined with the
10029 // actual operation.
10030 Comment smi_check_comment(masm, "-- Smi check arguments");
10031 __ JumpIfNotBothSmi(left, right, &not_smis);
10032 }
Leon Clarke4515c472010-02-03 11:58:03 +000010033 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010034
Leon Clarke4515c472010-02-03 11:58:03 +000010035 // 3. Operands are both smis (except for OR), perform the operation leaving
10036 // the result in rax and check the result if necessary.
10037 Comment perform_smi(masm, "-- Perform smi operation");
10038 Label use_fp_on_smis;
Steve Blocka7e24c12009-10-30 11:49:00 +000010039 switch (op_) {
10040 case Token::ADD: {
Leon Clarke4515c472010-02-03 11:58:03 +000010041 ASSERT(right.is(rax));
10042 __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +000010043 break;
10044 }
10045
10046 case Token::SUB: {
Leon Clarke4515c472010-02-03 11:58:03 +000010047 __ SmiSub(left, left, right, &use_fp_on_smis);
10048 __ movq(rax, left);
Steve Blocka7e24c12009-10-30 11:49:00 +000010049 break;
10050 }
10051
10052 case Token::MUL:
Leon Clarke4515c472010-02-03 11:58:03 +000010053 ASSERT(right.is(rax));
10054 __ SmiMul(right, right, left, &use_fp_on_smis); // MUL is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +000010055 break;
10056
10057 case Token::DIV:
Leon Clarke4515c472010-02-03 11:58:03 +000010058 ASSERT(left.is(rax));
10059 __ SmiDiv(left, left, right, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +000010060 break;
10061
10062 case Token::MOD:
Leon Clarke4515c472010-02-03 11:58:03 +000010063 ASSERT(left.is(rax));
10064 __ SmiMod(left, left, right, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +000010065 break;
10066
10067 case Token::BIT_OR:
Leon Clarke4515c472010-02-03 11:58:03 +000010068 ASSERT(right.is(rax));
10069 __ movq(rcx, right); // Save the right operand.
10070 __ SmiOr(right, right, left); // BIT_OR is commutative.
10071 __ testb(right, Immediate(kSmiTagMask));
10072 __ j(not_zero, &not_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +000010073 break;
10074
10075 case Token::BIT_AND:
Leon Clarke4515c472010-02-03 11:58:03 +000010076 ASSERT(right.is(rax));
10077 __ SmiAnd(right, right, left); // BIT_AND is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +000010078 break;
10079
10080 case Token::BIT_XOR:
Leon Clarke4515c472010-02-03 11:58:03 +000010081 ASSERT(right.is(rax));
10082 __ SmiXor(right, right, left); // BIT_XOR is commutative.
Steve Blocka7e24c12009-10-30 11:49:00 +000010083 break;
10084
10085 case Token::SHL:
10086 case Token::SHR:
10087 case Token::SAR:
Steve Blocka7e24c12009-10-30 11:49:00 +000010088 switch (op_) {
10089 case Token::SAR:
Leon Clarke4515c472010-02-03 11:58:03 +000010090 __ SmiShiftArithmeticRight(left, left, right);
Steve Blocka7e24c12009-10-30 11:49:00 +000010091 break;
10092 case Token::SHR:
Leon Clarke4515c472010-02-03 11:58:03 +000010093 __ SmiShiftLogicalRight(left, left, right, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +000010094 break;
10095 case Token::SHL:
Kristian Monsen25f61362010-05-21 11:50:48 +010010096 __ SmiShiftLeft(left, left, right);
Steve Blocka7e24c12009-10-30 11:49:00 +000010097 break;
10098 default:
10099 UNREACHABLE();
10100 }
Leon Clarke4515c472010-02-03 11:58:03 +000010101 __ movq(rax, left);
Steve Blocka7e24c12009-10-30 11:49:00 +000010102 break;
10103
10104 default:
10105 UNREACHABLE();
10106 break;
10107 }
Leon Clarke4515c472010-02-03 11:58:03 +000010108
Steve Block6ded16b2010-05-10 14:33:55 +010010109 // 4. Emit return of result in rax.
Leon Clarke4515c472010-02-03 11:58:03 +000010110 GenerateReturn(masm);
10111
10112 // 5. For some operations emit inline code to perform floating point
10113 // operations on known smis (e.g., if the result of the operation
10114 // overflowed the smi range).
10115 switch (op_) {
10116 case Token::ADD:
10117 case Token::SUB:
10118 case Token::MUL:
10119 case Token::DIV: {
Kristian Monsen25f61362010-05-21 11:50:48 +010010120 ASSERT(use_fp_on_smis.is_linked());
Leon Clarke4515c472010-02-03 11:58:03 +000010121 __ bind(&use_fp_on_smis);
10122 if (op_ == Token::DIV) {
10123 __ movq(rdx, rax);
10124 __ movq(rax, rbx);
10125 }
10126 // left is rdx, right is rax.
10127 __ AllocateHeapNumber(rbx, rcx, slow);
10128 FloatingPointHelper::LoadFloatOperandsFromSmis(masm, xmm4, xmm5);
10129 switch (op_) {
10130 case Token::ADD: __ addsd(xmm4, xmm5); break;
10131 case Token::SUB: __ subsd(xmm4, xmm5); break;
10132 case Token::MUL: __ mulsd(xmm4, xmm5); break;
10133 case Token::DIV: __ divsd(xmm4, xmm5); break;
10134 default: UNREACHABLE();
10135 }
10136 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm4);
10137 __ movq(rax, rbx);
10138 GenerateReturn(masm);
10139 }
10140 default:
10141 break;
10142 }
10143
10144 // 6. Non-smi operands, fall out to the non-smi code with the operands in
10145 // rdx and rax.
10146 Comment done_comment(masm, "-- Enter non-smi code");
10147 __ bind(&not_smis);
10148
10149 switch (op_) {
10150 case Token::DIV:
10151 case Token::MOD:
10152 // Operands are in rax, rbx at this point.
10153 __ movq(rdx, rax);
10154 __ movq(rax, rbx);
10155 break;
10156
10157 case Token::BIT_OR:
10158 // Right operand is saved in rcx and rax was destroyed by the smi
10159 // operation.
10160 __ movq(rax, rcx);
10161 break;
10162
10163 default:
10164 break;
10165 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010166}
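
// The smi fast path above performs the arithmetic on tagged operands and
// bails out to the floating-point code when the result leaves the smi range.
// A minimal C++ sketch of the ADD case, approximating the smi range by the
// signed 32-bit range; the helper name is illustrative and not used by the
// stub.
static inline bool SketchSmiAdd(int32_t left, int32_t right, int32_t* result) {
  int64_t sum = static_cast<int64_t>(left) + static_cast<int64_t>(right);
  if (sum > 0x7FFFFFFFLL || sum < -0x80000000LL) {
    return false;  // Overflow: corresponds to the use_fp_on_smis label above.
  }
  *result = static_cast<int32_t>(sum);
  return true;
}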
10167
10168
10169void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
10170 Label call_runtime;
Steve Block6ded16b2010-05-10 14:33:55 +010010171
10172 if (ShouldGenerateSmiCode()) {
Leon Clarke4515c472010-02-03 11:58:03 +000010173 GenerateSmiCode(masm, &call_runtime);
10174 } else if (op_ != Token::MOD) {
Steve Block6ded16b2010-05-10 14:33:55 +010010175 if (!HasArgsInRegisters()) {
10176 GenerateLoadArguments(masm);
10177 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010178 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010179 // Floating point case.
Steve Block6ded16b2010-05-10 14:33:55 +010010180 if (ShouldGenerateFPCode()) {
10181 switch (op_) {
10182 case Token::ADD:
10183 case Token::SUB:
10184 case Token::MUL:
10185 case Token::DIV: {
10186 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
10187 HasSmiCodeInStub()) {
10188 // Execution reaches this point when the first non-smi argument occurs
10189 // (and only if smi code is generated). This is the right moment to
10190 // patch to HEAP_NUMBERS state. The transition is attempted only for
10191 // the four basic operations. The stub stays in the DEFAULT state
10192 // forever for all other operations (also if smi code is skipped).
10193 GenerateTypeTransition(masm);
Andrei Popescu402d9372010-02-26 13:31:12 +000010194 }
Steve Block6ded16b2010-05-10 14:33:55 +010010195
10196 Label not_floats;
10197 // rax: y
10198 // rdx: x
10199 if (static_operands_type_.IsNumber() && FLAG_debug_code) {
10200 // Assert at runtime that inputs are only numbers.
10201 __ AbortIfNotNumber(rdx, "GenericBinaryOpStub operand not a number.");
10202 __ AbortIfNotNumber(rax, "GenericBinaryOpStub operand not a number.");
Andrei Popescu402d9372010-02-26 13:31:12 +000010203 } else {
10204 FloatingPointHelper::CheckNumberOperands(masm, &call_runtime);
10205 }
Steve Block6ded16b2010-05-10 14:33:55 +010010206 // Fast-case: Both operands are numbers.
10207 // xmm4 and xmm5 are volatile XMM registers.
10208 FloatingPointHelper::LoadFloatOperands(masm, xmm4, xmm5);
Leon Clarke4515c472010-02-03 11:58:03 +000010209
Steve Block6ded16b2010-05-10 14:33:55 +010010210 switch (op_) {
10211 case Token::ADD: __ addsd(xmm4, xmm5); break;
10212 case Token::SUB: __ subsd(xmm4, xmm5); break;
10213 case Token::MUL: __ mulsd(xmm4, xmm5); break;
10214 case Token::DIV: __ divsd(xmm4, xmm5); break;
10215 default: UNREACHABLE();
Leon Clarke4515c472010-02-03 11:58:03 +000010216 }
Steve Block6ded16b2010-05-10 14:33:55 +010010217 // Allocate a heap number, if needed.
10218 Label skip_allocation;
10219 OverwriteMode mode = mode_;
10220 if (HasArgsReversed()) {
10221 if (mode == OVERWRITE_RIGHT) {
10222 mode = OVERWRITE_LEFT;
10223 } else if (mode == OVERWRITE_LEFT) {
10224 mode = OVERWRITE_RIGHT;
10225 }
10226 }
10227 switch (mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +000010228 case OVERWRITE_LEFT:
Steve Block6ded16b2010-05-10 14:33:55 +010010229 __ JumpIfNotSmi(rdx, &skip_allocation);
10230 __ AllocateHeapNumber(rbx, rcx, &call_runtime);
10231 __ movq(rdx, rbx);
10232 __ bind(&skip_allocation);
10233 __ movq(rax, rdx);
10234 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010235 case OVERWRITE_RIGHT:
Steve Block6ded16b2010-05-10 14:33:55 +010010236 // If the argument in rax is already an object, we skip the
Steve Blocka7e24c12009-10-30 11:49:00 +000010237 // allocation of a heap number.
Steve Blocka7e24c12009-10-30 11:49:00 +000010238 __ JumpIfNotSmi(rax, &skip_allocation);
10239 // Fall through!
10240 case NO_OVERWRITE:
Steve Block6ded16b2010-05-10 14:33:55 +010010241 // Allocate a heap number for the result. Keep rax and rdx intact
10242 // for the possible runtime call.
10243 __ AllocateHeapNumber(rbx, rcx, &call_runtime);
10244 __ movq(rax, rbx);
Steve Blocka7e24c12009-10-30 11:49:00 +000010245 __ bind(&skip_allocation);
10246 break;
10247 default: UNREACHABLE();
10248 }
Steve Block6ded16b2010-05-10 14:33:55 +010010249 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm4);
Steve Blockd0582a62009-12-15 09:54:21 +000010250 GenerateReturn(masm);
Steve Block6ded16b2010-05-10 14:33:55 +010010251 __ bind(&not_floats);
10252 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
10253 !HasSmiCodeInStub()) {
10254 // Execution reaches this point when the first non-number argument
10255 // occurs (and only if smi code is skipped from the stub, otherwise
10256 // the patching has already been done earlier in this case branch).
10257 // A perfect moment to try patching to STRINGS for ADD operation.
10258 if (op_ == Token::ADD) {
10259 GenerateTypeTransition(masm);
10260 }
10261 }
10262 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010263 }
Steve Block6ded16b2010-05-10 14:33:55 +010010264 case Token::MOD: {
10265 // For MOD we go directly to runtime in the non-smi case.
10266 break;
10267 }
10268 case Token::BIT_OR:
10269 case Token::BIT_AND:
10270 case Token::BIT_XOR:
10271 case Token::SAR:
10272 case Token::SHL:
10273 case Token::SHR: {
10274 Label skip_allocation, non_smi_result;
Kristian Monsen25f61362010-05-21 11:50:48 +010010275 FloatingPointHelper::LoadAsIntegers(masm, &call_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010276 switch (op_) {
10277 case Token::BIT_OR: __ orl(rax, rcx); break;
10278 case Token::BIT_AND: __ andl(rax, rcx); break;
10279 case Token::BIT_XOR: __ xorl(rax, rcx); break;
10280 case Token::SAR: __ sarl_cl(rax); break;
10281 case Token::SHL: __ shll_cl(rax); break;
10282 case Token::SHR: __ shrl_cl(rax); break;
10283 default: UNREACHABLE();
10284 }
10285 if (op_ == Token::SHR) {
Kristian Monsen25f61362010-05-21 11:50:48 +010010286 // Check if result is negative. This can only happen for a shift
Steve Block6ded16b2010-05-10 14:33:55 +010010287 // by zero, which also doesn't update the sign flag.
10288 __ testl(rax, rax);
10289 __ j(negative, &non_smi_result);
10290 }
10291 __ JumpIfNotValidSmiValue(rax, &non_smi_result);
10292 // Tag smi result, if possible, and return.
10293 __ Integer32ToSmi(rax, rax);
10294 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +000010295
Steve Block6ded16b2010-05-10 14:33:55 +010010296 // All ops except SHR return a signed int32 that we load in
10297 // a HeapNumber.
10298 if (op_ != Token::SHR && non_smi_result.is_linked()) {
10299 __ bind(&non_smi_result);
10300 // Allocate a heap number if needed.
10301 __ movsxlq(rbx, rax); // rbx: sign extended 32-bit result
10302 switch (mode_) {
10303 case OVERWRITE_LEFT:
10304 case OVERWRITE_RIGHT:
10305 // If the operand was an object, we skip the
10306 // allocation of a heap number.
10307 __ movq(rax, Operand(rsp, mode_ == OVERWRITE_RIGHT ?
10308 1 * kPointerSize : 2 * kPointerSize));
10309 __ JumpIfNotSmi(rax, &skip_allocation);
10310 // Fall through!
10311 case NO_OVERWRITE:
10312 __ AllocateHeapNumber(rax, rcx, &call_runtime);
10313 __ bind(&skip_allocation);
10314 break;
10315 default: UNREACHABLE();
10316 }
10317 // Store the result in the HeapNumber and return.
10318 __ movq(Operand(rsp, 1 * kPointerSize), rbx);
10319 __ fild_s(Operand(rsp, 1 * kPointerSize));
10320 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
10321 GenerateReturn(masm);
10322 }
10323
10324 // SHR should return uint32 - go to runtime for non-smi/negative result.
10325 if (op_ == Token::SHR) {
10326 __ bind(&non_smi_result);
10327 }
10328 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010329 }
Steve Block6ded16b2010-05-10 14:33:55 +010010330 default: UNREACHABLE(); break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010331 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010332 }
10333
10334 // If all else fails, use the runtime system to get the correct
Steve Blockd0582a62009-12-15 09:54:21 +000010335 // result. If arguments were passed in registers, now place them on the
10336 // stack in the correct order below the return address.
Steve Blocka7e24c12009-10-30 11:49:00 +000010337 __ bind(&call_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010338
Leon Clarke4515c472010-02-03 11:58:03 +000010339 if (HasArgsInRegisters()) {
Steve Block6ded16b2010-05-10 14:33:55 +010010340 GenerateRegisterArgsPush(masm);
Steve Blockd0582a62009-12-15 09:54:21 +000010341 }
Steve Block6ded16b2010-05-10 14:33:55 +010010342
Steve Blocka7e24c12009-10-30 11:49:00 +000010343 switch (op_) {
Steve Blockd0582a62009-12-15 09:54:21 +000010344 case Token::ADD: {
Steve Block6ded16b2010-05-10 14:33:55 +010010345 // Registers containing left and right operands respectively.
10346 Register lhs, rhs;
10347
10348 if (HasArgsReversed()) {
10349 lhs = rax;
10350 rhs = rdx;
10351 } else {
10352 lhs = rdx;
10353 rhs = rax;
10354 }
10355
Steve Blockd0582a62009-12-15 09:54:21 +000010356 // Test for string arguments before calling runtime.
Steve Block6ded16b2010-05-10 14:33:55 +010010357 Label not_strings, both_strings, not_string1, string1, string1_smi2;
10358
10359 // If this stub has already generated FP-specific code then the arguments
10360 // are already in rdx, rax
10361 if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
10362 GenerateLoadArguments(masm);
10363 }
10364
Steve Blockd0582a62009-12-15 09:54:21 +000010365 Condition is_smi;
Steve Block6ded16b2010-05-10 14:33:55 +010010366 is_smi = masm->CheckSmi(lhs);
Steve Blockd0582a62009-12-15 09:54:21 +000010367 __ j(is_smi, &not_string1);
Steve Block6ded16b2010-05-10 14:33:55 +010010368 __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, r8);
Steve Blockd0582a62009-12-15 09:54:21 +000010369 __ j(above_equal, &not_string1);
10370
10371 // First argument is a string, test second.
Steve Block6ded16b2010-05-10 14:33:55 +010010372 is_smi = masm->CheckSmi(rhs);
10373 __ j(is_smi, &string1_smi2);
10374 __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9);
Steve Blockd0582a62009-12-15 09:54:21 +000010375 __ j(above_equal, &string1);
10376
10377 // First and second argument are strings.
Steve Block6ded16b2010-05-10 14:33:55 +010010378 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
10379 __ TailCallStub(&string_add_stub);
10380
10381 __ bind(&string1_smi2);
10382 // First argument is a string, second is a smi. Try to look up the number
10383 // string for the smi in the number string cache.
10384 NumberToStringStub::GenerateLookupNumberStringCache(
10385 masm, rhs, rbx, rcx, r8, true, &string1);
10386
10387 // Replace second argument on stack and tailcall string add stub to make
10388 // the result.
10389 __ movq(Operand(rsp, 1 * kPointerSize), rbx);
10390 __ TailCallStub(&string_add_stub);
Steve Blockd0582a62009-12-15 09:54:21 +000010391
10392 // Only first argument is a string.
10393 __ bind(&string1);
Steve Block6ded16b2010-05-10 14:33:55 +010010394 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);
Steve Blockd0582a62009-12-15 09:54:21 +000010395
10396 // First argument was not a string, test second.
10397 __ bind(&not_string1);
Steve Block6ded16b2010-05-10 14:33:55 +010010398 is_smi = masm->CheckSmi(rhs);
Steve Blockd0582a62009-12-15 09:54:21 +000010399 __ j(is_smi, &not_strings);
Steve Block6ded16b2010-05-10 14:33:55 +010010400 __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, rhs);
Steve Blockd0582a62009-12-15 09:54:21 +000010401 __ j(above_equal, &not_strings);
10402
10403 // Only second argument is a string.
Steve Block6ded16b2010-05-10 14:33:55 +010010404 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);
Steve Blockd0582a62009-12-15 09:54:21 +000010405
10406 __ bind(&not_strings);
10407 // Neither argument is a string.
Steve Blocka7e24c12009-10-30 11:49:00 +000010408 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
10409 break;
Steve Blockd0582a62009-12-15 09:54:21 +000010410 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010411 case Token::SUB:
10412 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
10413 break;
10414 case Token::MUL:
10415 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
Leon Clarke4515c472010-02-03 11:58:03 +000010416 break;
Steve Blocka7e24c12009-10-30 11:49:00 +000010417 case Token::DIV:
10418 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
10419 break;
10420 case Token::MOD:
10421 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
10422 break;
10423 case Token::BIT_OR:
10424 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
10425 break;
10426 case Token::BIT_AND:
10427 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
10428 break;
10429 case Token::BIT_XOR:
10430 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
10431 break;
10432 case Token::SAR:
10433 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
10434 break;
10435 case Token::SHL:
10436 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
10437 break;
10438 case Token::SHR:
10439 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
10440 break;
10441 default:
10442 UNREACHABLE();
10443 }
10444}
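
// In the floating-point path above the result HeapNumber is either reused
// from an operand or freshly allocated. Two details drive the choice: swapped
// arguments swap what the overwrite mode refers to, and a smi operand has no
// HeapNumber to overwrite. The enum and helper below are an illustrative
// sketch, not the real OverwriteMode machinery.
enum SketchOverwrite {
  SKETCH_NO_OVERWRITE,
  SKETCH_OVERWRITE_LEFT,
  SKETCH_OVERWRITE_RIGHT
};
enum SketchResultSlot {
  SKETCH_ALLOCATE_NEW,
  SKETCH_REUSE_LEFT,
  SKETCH_REUSE_RIGHT
};

static inline SketchResultSlot SketchChooseResultSlot(SketchOverwrite mode,
                                                      bool args_reversed,
                                                      bool left_is_smi,
                                                      bool right_is_smi) {
  // Swapped arguments swap which operand the overwrite mode refers to.
  if (args_reversed) {
    if (mode == SKETCH_OVERWRITE_LEFT) {
      mode = SKETCH_OVERWRITE_RIGHT;
    } else if (mode == SKETCH_OVERWRITE_RIGHT) {
      mode = SKETCH_OVERWRITE_LEFT;
    }
  }
  // A smi operand has no HeapNumber to overwrite, so allocate a fresh one.
  if (mode == SKETCH_OVERWRITE_LEFT && !left_is_smi) return SKETCH_REUSE_LEFT;
  if (mode == SKETCH_OVERWRITE_RIGHT && !right_is_smi) return SKETCH_REUSE_RIGHT;
  return SKETCH_ALLOCATE_NEW;
}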
10445
10446
Steve Blockd0582a62009-12-15 09:54:21 +000010447void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +010010448 ASSERT(!HasArgsInRegisters());
10449 __ movq(rax, Operand(rsp, 1 * kPointerSize));
10450 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
Steve Blockd0582a62009-12-15 09:54:21 +000010451}
10452
10453
10454void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
10455 // If arguments are not passed in registers remove them from the stack before
10456 // returning.
Leon Clarke4515c472010-02-03 11:58:03 +000010457 if (!HasArgsInRegisters()) {
Steve Blockd0582a62009-12-15 09:54:21 +000010458 __ ret(2 * kPointerSize); // Remove both operands
10459 } else {
10460 __ ret(0);
10461 }
10462}
10463
10464
Steve Block6ded16b2010-05-10 14:33:55 +010010465void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
10466 ASSERT(HasArgsInRegisters());
10467 __ pop(rcx);
10468 if (HasArgsReversed()) {
10469 __ push(rax);
10470 __ push(rdx);
10471 } else {
10472 __ push(rdx);
10473 __ push(rax);
10474 }
10475 __ push(rcx);
Leon Clarkee46be812010-01-19 14:06:41 +000010476}
10477
10478
Steve Block6ded16b2010-05-10 14:33:55 +010010479void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
10480 Label get_result;
10481
10482 // Keep a copy of operands on the stack and make sure they are also in
10483 // rdx, rax.
10484 if (HasArgsInRegisters()) {
10485 GenerateRegisterArgsPush(masm);
10486 } else {
10487 GenerateLoadArguments(masm);
10488 }
10489
10490 // Internal frame is necessary to handle exceptions properly.
10491 __ EnterInternalFrame();
10492
10493 // Push arguments on stack if the stub expects them there.
10494 if (!HasArgsInRegisters()) {
10495 __ push(rdx);
10496 __ push(rax);
10497 }
10498 // Call the stub proper to get the result in rax.
10499 __ call(&get_result);
10500 __ LeaveInternalFrame();
10501
10502 // Left and right arguments are already on stack.
10503 __ pop(rcx);
10504 // Push the operation result. The tail call to BinaryOp_Patch will
10505 // return it to the original caller..
10506 __ push(rax);
10507
10508 // Push this stub's key.
10509 __ movq(rax, Immediate(MinorKey()));
10510 __ Integer32ToSmi(rax, rax);
10511 __ push(rax);
10512
10513 // Although the operation and the type info are encoded into the key,
10514 // the encoding is opaque, so push them too.
10515 __ movq(rax, Immediate(op_));
10516 __ Integer32ToSmi(rax, rax);
10517 __ push(rax);
10518
10519 __ movq(rax, Immediate(runtime_operands_type_));
10520 __ Integer32ToSmi(rax, rax);
10521 __ push(rax);
10522
10523 __ push(rcx);
10524
10525 // Perform patching to an appropriate fast case and return the result.
10526 __ TailCallExternalReference(
10527 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
10528 6,
10529 1);
10530
10531 // The entry point for the result calculation is assumed to be immediately
10532 // after this sequence.
10533 __ bind(&get_result);
10534}
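
// The type transition above hands six values to IC::kBinaryOp_Patch. The
// struct below is purely descriptive (not a real V8 type) and lists them in
// the order they end up on the stack under the return address.
struct SketchBinaryOpPatchArgs {
  void* left;             // Original left operand, still on the stack.
  void* right;            // Original right operand.
  void* result;           // Result produced by the code following get_result.
  int stub_minor_key;     // MinorKey(), pushed as a smi.
  int op;                 // Token::Value of the operation, pushed as a smi.
  int runtime_type_info;  // BinaryOpIC::TypeInfo observed so far, pushed as a smi.
};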
10535
10536
10537Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
10538 GenericBinaryOpStub stub(key, type_info);
10539 return stub.GetCode();
10540}
10541
10542
10543int CompareStub::MinorKey() {
10544 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
10545 // stubs the never NaN NaN condition is only taken into account if the
10546 // condition is equals.
10547 ASSERT(static_cast<unsigned>(cc_) < (1 << 13));
10548 return ConditionField::encode(static_cast<unsigned>(cc_))
10549 | StrictField::encode(strict_)
10550 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
10551 | IncludeNumberCompareField::encode(include_number_compare_);
10552}
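
// A sketch of the packing done above, assuming the layout implied by the
// ASSERT: thirteen bits of condition followed by three one-bit flags. The
// real positions come from the ConditionField, StrictField, NeverNanNanField
// and IncludeNumberCompareField BitField typedefs, so treat the shift amounts
// below as assumptions.
static inline int SketchCompareMinorKey(unsigned condition, bool strict,
                                        bool never_nan_nan,
                                        bool include_number_compare) {
  int key = static_cast<int>(condition & 0x1FFF);  // bits 0..12
  if (strict) key |= 1 << 13;                      // bit 13
  if (never_nan_nan) key |= 1 << 14;               // bit 14
  if (include_number_compare) key |= 1 << 15;      // bit 15
  return key;
}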
10553
10554
10555// Unfortunately you have to run without snapshots to see most of these
10556// names in the profile since most compare stubs end up in the snapshot.
Leon Clarkee46be812010-01-19 14:06:41 +000010557const char* CompareStub::GetName() {
Steve Block6ded16b2010-05-10 14:33:55 +010010558 if (name_ != NULL) return name_;
10559 const int kMaxNameLength = 100;
10560 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
10561 if (name_ == NULL) return "OOM";
10562
10563 const char* cc_name;
Leon Clarkee46be812010-01-19 14:06:41 +000010564 switch (cc_) {
Steve Block6ded16b2010-05-10 14:33:55 +010010565 case less: cc_name = "LT"; break;
10566 case greater: cc_name = "GT"; break;
10567 case less_equal: cc_name = "LE"; break;
10568 case greater_equal: cc_name = "GE"; break;
10569 case equal: cc_name = "EQ"; break;
10570 case not_equal: cc_name = "NE"; break;
10571 default: cc_name = "UnknownCondition"; break;
10572 }
10573
10574 const char* strict_name = "";
10575 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
10576 strict_name = "_STRICT";
10577 }
10578
10579 const char* never_nan_nan_name = "";
10580 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
10581 never_nan_nan_name = "_NO_NAN";
10582 }
10583
10584 const char* include_number_compare_name = "";
10585 if (!include_number_compare_) {
10586 include_number_compare_name = "_NO_NUMBER";
10587 }
10588
10589 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
10590 "CompareStub_%s%s%s%s",
10591 cc_name,
10592 strict_name,
10593 never_nan_nan_name,
10594 include_number_compare_name);
10595 return name_;
10596}
10597
10598
10599void StringHelper::GenerateFastCharCodeAt(MacroAssembler* masm,
10600 Register object,
10601 Register index,
10602 Register scratch,
10603 Register result,
10604 Label* receiver_not_string,
10605 Label* index_not_smi,
10606 Label* index_out_of_range,
10607 Label* slow_case) {
10608 Label not_a_flat_string;
10609 Label try_again_with_new_string;
10610 Label ascii_string;
10611 Label got_char_code;
10612
10613 // If the receiver is a smi trigger the non-string case.
10614 __ JumpIfSmi(object, receiver_not_string);
10615
10616 // Fetch the instance type of the receiver into result register.
10617 __ movq(result, FieldOperand(object, HeapObject::kMapOffset));
10618 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
10619 // If the receiver is not a string trigger the non-string case.
10620 __ testb(result, Immediate(kIsNotStringMask));
10621 __ j(not_zero, receiver_not_string);
10622
10623 // If the index is non-smi trigger the non-smi case.
10624 __ JumpIfNotSmi(index, index_not_smi);
10625
10626 // Check for index out of range.
10627 __ SmiCompare(index, FieldOperand(object, String::kLengthOffset));
10628 __ j(above_equal, index_out_of_range);
10629
10630 __ bind(&try_again_with_new_string);
10631 // ----------- S t a t e -------------
10632 // -- object : string to access
10633 // -- result : instance type of the string
10634 // -- scratch : non-negative index < length
10635 // -----------------------------------
10636
10637 // We need special handling for non-flat strings.
10638 ASSERT_EQ(0, kSeqStringTag);
10639 __ testb(result, Immediate(kStringRepresentationMask));
10640 __ j(not_zero, &not_a_flat_string);
10641
10642 // Put untagged index into scratch register.
10643 __ SmiToInteger32(scratch, index);
10644
10645 // Check for 1-byte or 2-byte string.
10646 ASSERT_EQ(0, kTwoByteStringTag);
10647 __ testb(result, Immediate(kStringEncodingMask));
10648 __ j(not_zero, &ascii_string);
10649
10650 // 2-byte string.
10651 // Load the 2-byte character code into the result register.
10652 __ movzxwl(result, FieldOperand(object,
10653 scratch,
10654 times_2,
10655 SeqTwoByteString::kHeaderSize));
10656 __ jmp(&got_char_code);
10657
10658 // Handle non-flat strings.
10659 __ bind(&not_a_flat_string);
10660 __ and_(result, Immediate(kStringRepresentationMask));
10661 __ cmpb(result, Immediate(kConsStringTag));
10662 __ j(not_equal, slow_case);
10663
10664 // ConsString.
10665 // Check that the right hand side is the empty string (i.e. if this is really a
10666 // flat string in a cons string). If that is not the case we would rather go
10667 // to the runtime system now, to flatten the string.
10668 __ movq(result, FieldOperand(object, ConsString::kSecondOffset));
10669 __ CompareRoot(result, Heap::kEmptyStringRootIndex);
10670 __ j(not_equal, slow_case);
10671 // Get the first of the two strings and load its instance type.
10672 __ movq(object, FieldOperand(object, ConsString::kFirstOffset));
10673 __ movq(result, FieldOperand(object, HeapObject::kMapOffset));
10674 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
10675 __ jmp(&try_again_with_new_string);
10676
10677 // ASCII string.
10678 __ bind(&ascii_string);
10679 // Load the byte into the result register.
10680 __ movzxbl(result, FieldOperand(object,
10681 scratch,
10682 times_1,
10683 SeqAsciiString::kHeaderSize));
10684 __ bind(&got_char_code);
10685 __ Integer32ToSmi(result, result);
10686}
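
// The flow above, restated in C++ over a hypothetical string representation:
// reject out-of-range indices, follow a cons string whose second part is
// empty into its first part, then read an 8-bit or 16-bit character from the
// flat data. Struct and helper are illustrative only; the real code also
// checks receiver and index types and tags the result as a smi.
struct SketchString {
  bool is_flat;                     // Sequential (flat) string?
  bool second_part_is_empty;        // For cons strings: is the right side empty?
  const SketchString* first;        // For cons strings: the left side.
  const unsigned char* ascii_data;  // Flat one-byte data, or NULL.
  const unsigned short* uc16_data;  // Flat two-byte data, or NULL.
  int length;
};

// Returns the character code, or -1 where the stub jumps to a slow-case label.
static inline int SketchCharCodeAt(const SketchString* s, int index) {
  if (index < 0 || index >= s->length) return -1;  // index_out_of_range
  while (!s->is_flat) {
    if (!s->second_part_is_empty) return -1;       // slow_case: must flatten
    s = s->first;                                  // try_again_with_new_string
  }
  return s->ascii_data != NULL ? s->ascii_data[index] : s->uc16_data[index];
}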
10687
10688
10689void StringHelper::GenerateCharFromCode(MacroAssembler* masm,
10690 Register code,
10691 Register result,
10692 Register scratch,
10693 InvokeFlag flag) {
10694 ASSERT(!code.is(result));
10695
10696 Label slow_case;
10697 Label exit;
10698
10699 // Fast case of Heap::LookupSingleCharacterStringFromCode.
10700 __ JumpIfNotSmi(code, &slow_case);
10701 __ SmiToInteger32(scratch, code);
10702 __ cmpl(scratch, Immediate(String::kMaxAsciiCharCode));
10703 __ j(above, &slow_case);
10704
10705 __ Move(result, Factory::single_character_string_cache());
10706 __ movq(result, FieldOperand(result,
10707 scratch,
10708 times_pointer_size,
10709 FixedArray::kHeaderSize));
10710
10711 __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
10712 __ j(equal, &slow_case);
10713 __ jmp(&exit);
10714
10715 __ bind(&slow_case);
10716 if (flag == CALL_FUNCTION) {
10717 __ push(code);
10718 __ CallRuntime(Runtime::kCharFromCode, 1);
10719 if (!result.is(rax)) {
10720 __ movq(result, rax);
Leon Clarkee46be812010-01-19 14:06:41 +000010721 }
Steve Block6ded16b2010-05-10 14:33:55 +010010722 } else {
10723 ASSERT(flag == JUMP_FUNCTION);
10724 ASSERT(result.is(rax));
10725 __ pop(rax); // Save return address.
10726 __ push(code);
10727 __ push(rax); // Restore return address.
10728 __ TailCallRuntime(Runtime::kCharFromCode, 1, 1);
10729 }
10730
10731 __ bind(&exit);
10732 if (flag == JUMP_FUNCTION) {
10733 ASSERT(result.is(rax));
10734 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +000010735 }
10736}
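
// The fast case above is a cache lookup: character codes up to
// String::kMaxAsciiCharCode index into the single character string cache, and
// an undefined entry, or a larger code, falls back to Runtime::kCharFromCode.
// The sketch below uses a plain pointer array with NULL standing in for the
// undefined sentinel; names are illustrative.
static inline const void* SketchCharFromCode(const void* const* cache,
                                             int max_ascii_char_code,
                                             int code) {
  if (code < 0 || code > max_ascii_char_code) return NULL;  // Slow case.
  return cache[code];  // NULL (undefined) also means: call the runtime.
}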
10737
10738
10739void StringAddStub::Generate(MacroAssembler* masm) {
10740 Label string_add_runtime;
10741
10742 // Load the two arguments.
10743 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument.
10744 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument.
10745
10746 // Make sure that both arguments are strings if not known in advance.
10747 if (string_check_) {
10748 Condition is_smi;
10749 is_smi = masm->CheckSmi(rax);
10750 __ j(is_smi, &string_add_runtime);
10751 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
10752 __ j(above_equal, &string_add_runtime);
10753
10754 // First argument is a string, test second.
10755 is_smi = masm->CheckSmi(rdx);
10756 __ j(is_smi, &string_add_runtime);
10757 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
10758 __ j(above_equal, &string_add_runtime);
10759 }
10760
10761 // Both arguments are strings.
10762 // rax: first string
10763 // rdx: second string
10764 // Check if either of the strings are empty. In that case return the other.
10765 Label second_not_zero_length, both_not_zero_length;
Steve Block6ded16b2010-05-10 14:33:55 +010010766 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
10767 __ SmiTest(rcx);
Leon Clarkee46be812010-01-19 14:06:41 +000010768 __ j(not_zero, &second_not_zero_length);
10769 // Second string is empty, result is first string which is already in rax.
10770 __ IncrementCounter(&Counters::string_add_native, 1);
10771 __ ret(2 * kPointerSize);
10772 __ bind(&second_not_zero_length);
Steve Block6ded16b2010-05-10 14:33:55 +010010773 __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
10774 __ SmiTest(rbx);
Leon Clarkee46be812010-01-19 14:06:41 +000010775 __ j(not_zero, &both_not_zero_length);
10776 // First string is empty, result is second string which is in rdx.
10777 __ movq(rax, rdx);
10778 __ IncrementCounter(&Counters::string_add_native, 1);
10779 __ ret(2 * kPointerSize);
10780
10781 // Both strings are non-empty.
10782 // rax: first string
10783 // rbx: length of first string
Leon Clarked91b9f72010-01-27 17:25:45 +000010784 // rcx: length of second string
10785 // rdx: second string
Steve Block6ded16b2010-05-10 14:33:55 +010010786 // r8: map of first string if string check was performed above
10787 // r9: map of second string if string check was performed above
10788 Label string_add_flat_result, longer_than_two;
Leon Clarkee46be812010-01-19 14:06:41 +000010789 __ bind(&both_not_zero_length);
Steve Block6ded16b2010-05-10 14:33:55 +010010790
Leon Clarkee46be812010-01-19 14:06:41 +000010791 // If arguments were known to be strings, maps are not loaded to r8 and r9
10792 // by the code above.
10793 if (!string_check_) {
10794 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
10795 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
10796 }
10797 // Get the instance types of the two strings as they will be needed soon.
10798 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
10799 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010010800
10801 // Look at the length of the result of adding the two strings.
10802 ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
10803 __ SmiAdd(rbx, rbx, rcx, NULL);
10804 // Use the runtime system when adding two one character strings, as it
10805 // contains optimizations for this specific case using the symbol table.
10806 __ SmiCompare(rbx, Smi::FromInt(2));
10807 __ j(not_equal, &longer_than_two);
10808
10809 // Check that both strings are non-external ascii strings.
10810 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
10811 &string_add_runtime);
10812
10813 // Get the two characters forming the sub string.
10814 __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
10815 __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
10816
10817 // Try to look up two character string in symbol table. If it is not found
10818 // just allocate a new one.
10819 Label make_two_character_string, make_flat_ascii_string;
10820 StringHelper::GenerateTwoCharacterSymbolTableProbe(
10821 masm, rbx, rcx, r14, r12, rdi, r15, &make_two_character_string);
10822 __ IncrementCounter(&Counters::string_add_native, 1);
10823 __ ret(2 * kPointerSize);
10824
10825 __ bind(&make_two_character_string);
10826 __ Set(rbx, 2);
10827 __ jmp(&make_flat_ascii_string);
10828
10829 __ bind(&longer_than_two);
Leon Clarkee46be812010-01-19 14:06:41 +000010830 // Check if resulting string will be flat.
Steve Block6ded16b2010-05-10 14:33:55 +010010831 __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
Leon Clarkee46be812010-01-19 14:06:41 +000010832 __ j(below, &string_add_flat_result);
10833 // Handle exceptionally long strings in the runtime system.
10834 ASSERT((String::kMaxLength & 0x80000000) == 0);
Steve Block6ded16b2010-05-10 14:33:55 +010010835 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
Leon Clarkee46be812010-01-19 14:06:41 +000010836 __ j(above, &string_add_runtime);
10837
10838 // If result is not supposed to be flat, allocate a cons string object. If
10839 // both strings are ascii the result is an ascii cons string.
10840 // rax: first string
10841 // rbx: length of resulting flat string
10842 // rdx: second string
10843 // r8: instance type of first string
10844 // r9: instance type of second string
10845 Label non_ascii, allocated;
10846 __ movl(rcx, r8);
10847 __ and_(rcx, r9);
10848 ASSERT(kStringEncodingMask == kAsciiStringTag);
10849 __ testl(rcx, Immediate(kAsciiStringTag));
10850 __ j(zero, &non_ascii);
10851 // Allocate an ascii cons string.
10852 __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
10853 __ bind(&allocated);
10854 // Fill the fields of the cons string.
Steve Block6ded16b2010-05-10 14:33:55 +010010855 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
Leon Clarkee46be812010-01-19 14:06:41 +000010856 __ movl(FieldOperand(rcx, ConsString::kHashFieldOffset),
10857 Immediate(String::kEmptyHashField));
10858 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
10859 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
10860 __ movq(rax, rcx);
10861 __ IncrementCounter(&Counters::string_add_native, 1);
10862 __ ret(2 * kPointerSize);
10863 __ bind(&non_ascii);
10864 // Allocate a two byte cons string.
10865 __ AllocateConsString(rcx, rdi, no_reg, &string_add_runtime);
10866 __ jmp(&allocated);
10867
10868 // Handle creating a flat result. First check that both strings are not
10869 // external strings.
10870 // rax: first string
Steve Block6ded16b2010-05-10 14:33:55 +010010871 // rbx: length of resulting flat string as smi
Leon Clarkee46be812010-01-19 14:06:41 +000010872 // rdx: second string
10873 // r8: instance type of first string
10874 // r9: instance type of second string
10875 __ bind(&string_add_flat_result);
Steve Block6ded16b2010-05-10 14:33:55 +010010876 __ SmiToInteger32(rbx, rbx);
Leon Clarkee46be812010-01-19 14:06:41 +000010877 __ movl(rcx, r8);
10878 __ and_(rcx, Immediate(kStringRepresentationMask));
10879 __ cmpl(rcx, Immediate(kExternalStringTag));
10880 __ j(equal, &string_add_runtime);
10881 __ movl(rcx, r9);
10882 __ and_(rcx, Immediate(kStringRepresentationMask));
10883 __ cmpl(rcx, Immediate(kExternalStringTag));
10884 __ j(equal, &string_add_runtime);
10885 // Now check if both strings are ascii strings.
10886 // rax: first string
10887 // rbx: length of resulting flat string
10888 // rdx: second string
10889 // r8: instance type of first string
10890 // r9: instance type of second string
10891 Label non_ascii_string_add_flat_result;
10892 ASSERT(kStringEncodingMask == kAsciiStringTag);
10893 __ testl(r8, Immediate(kAsciiStringTag));
10894 __ j(zero, &non_ascii_string_add_flat_result);
10895 __ testl(r9, Immediate(kAsciiStringTag));
10896 __ j(zero, &string_add_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010897
10898 __ bind(&make_flat_ascii_string);
Leon Clarkee46be812010-01-19 14:06:41 +000010899 // Both strings are ascii strings. As they are short they are both flat.
10900 __ AllocateAsciiString(rcx, rbx, rdi, r14, r15, &string_add_runtime);
10901 // rcx: result string
10902 __ movq(rbx, rcx);
10903 // Locate first character of result.
10904 __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10905 // Locate first character of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010010906 __ movq(rdi, FieldOperand(rax, String::kLengthOffset));
10907 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010908 __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10909 // rax: first char of first argument
10910 // rbx: result string
10911 // rcx: first character of result
10912 // rdx: second string
10913 // rdi: length of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010010914 StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
Leon Clarkee46be812010-01-19 14:06:41 +000010915 // Locate first character of second argument.
Steve Block6ded16b2010-05-10 14:33:55 +010010916 __ movq(rdi, FieldOperand(rdx, String::kLengthOffset));
10917 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010918 __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
10919 // rbx: result string
10920 // rcx: next character of result
10921 // rdx: first char of second argument
10922 // rdi: length of second argument
Steve Block6ded16b2010-05-10 14:33:55 +010010923 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
Leon Clarkee46be812010-01-19 14:06:41 +000010924 __ movq(rax, rbx);
10925 __ IncrementCounter(&Counters::string_add_native, 1);
10926 __ ret(2 * kPointerSize);
10927
10928 // Handle creating a flat two byte result.
10929 // rax: first string - known to be two byte
10930 // rbx: length of resulting flat string
10931 // rdx: second string
10932 // r8: instance type of first string
10933 // r9: instance type of second string
10934 __ bind(&non_ascii_string_add_flat_result);
10935 __ and_(r9, Immediate(kAsciiStringTag));
10936 __ j(not_zero, &string_add_runtime);
10937 // Both strings are two byte strings. As they are short they are both
10938 // flat.
10939 __ AllocateTwoByteString(rcx, rbx, rdi, r14, r15, &string_add_runtime);
10940 // rcx: result string
10941 __ movq(rbx, rcx);
10942 // Locate first character of result.
10943 __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10944 // Locate first character of first argument.
Steve Block6ded16b2010-05-10 14:33:55 +010010945 __ movq(rdi, FieldOperand(rax, String::kLengthOffset));
10946 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010947 __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10948 // rax: first char of first argument
10949 // rbx: result string
10950 // rcx: first character of result
10951 // rdx: second argument
10952 // rdi: length of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010010953 StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
Leon Clarkee46be812010-01-19 14:06:41 +000010954 // Locate first character of second argument.
Steve Block6ded16b2010-05-10 14:33:55 +010010955 __ movq(rdi, FieldOperand(rdx, String::kLengthOffset));
10956 __ SmiToInteger32(rdi, rdi);
Leon Clarkee46be812010-01-19 14:06:41 +000010957 __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
10958 // rbx: result string
10959 // rcx: next character of result
10960 // rdx: first char of second argument
10961 // rdi: length of second argument
Steve Block6ded16b2010-05-10 14:33:55 +010010962 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
Leon Clarkee46be812010-01-19 14:06:41 +000010963 __ movq(rax, rbx);
10964 __ IncrementCounter(&Counters::string_add_native, 1);
10965 __ ret(2 * kPointerSize);
10966
10967 // Just jump to runtime to add the two strings.
10968 __ bind(&string_add_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010010969 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
Leon Clarkee46be812010-01-19 14:06:41 +000010970}
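
// The stub above is essentially a dispatch on the two string lengths. The
// classifier below mirrors its branches; the thresholds are parameters
// because String::kMinNonFlatLength and String::kMaxLength are defined
// elsewhere. Enum and function names are an illustrative sketch only.
enum SketchAddStrategy {
  SKETCH_RETURN_FIRST,      // Second string is empty.
  SKETCH_RETURN_SECOND,     // First string is empty.
  SKETCH_TWO_CHAR_SYMBOL,   // Combined length 2: probe the symbol table.
  SKETCH_FLAT_COPY,         // Short result: allocate flat and copy both parts.
  SKETCH_CONS_STRING,       // Long result: allocate a ConsString wrapper.
  SKETCH_RUNTIME            // Over-long result: Runtime::kStringAdd.
};

static inline SketchAddStrategy SketchChooseAddStrategy(int first_length,
                                                        int second_length,
                                                        int min_non_flat_length,
                                                        int max_length) {
  if (second_length == 0) return SKETCH_RETURN_FIRST;
  if (first_length == 0) return SKETCH_RETURN_SECOND;
  int length = first_length + second_length;
  if (length == 2) return SKETCH_TWO_CHAR_SYMBOL;
  if (length < min_non_flat_length) return SKETCH_FLAT_COPY;
  if (length > max_length) return SKETCH_RUNTIME;
  return SKETCH_CONS_STRING;
}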
10971
10972
Steve Block6ded16b2010-05-10 14:33:55 +010010973void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
10974 Register dest,
10975 Register src,
10976 Register count,
10977 bool ascii) {
Leon Clarkee46be812010-01-19 14:06:41 +000010978 Label loop;
10979 __ bind(&loop);
10980 // This loop just copies one character at a time, as it is only used for very
10981 // short strings.
10982 if (ascii) {
10983 __ movb(kScratchRegister, Operand(src, 0));
10984 __ movb(Operand(dest, 0), kScratchRegister);
10985 __ addq(src, Immediate(1));
10986 __ addq(dest, Immediate(1));
10987 } else {
10988 __ movzxwl(kScratchRegister, Operand(src, 0));
10989 __ movw(Operand(dest, 0), kScratchRegister);
10990 __ addq(src, Immediate(2));
10991 __ addq(dest, Immediate(2));
10992 }
10993 __ subl(count, Immediate(1));
10994 __ j(not_zero, &loop);
10995}
10996
10997
Steve Block6ded16b2010-05-10 14:33:55 +010010998void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
10999 Register dest,
11000 Register src,
11001 Register count,
11002 bool ascii) {
Leon Clarked91b9f72010-01-27 17:25:45 +000011003 // Copy characters using rep movs of quadwords. Copy the remaining characters
11004 // byte by byte after running rep movs.
11006 ASSERT(dest.is(rdi)); // rep movs destination
11007 ASSERT(src.is(rsi)); // rep movs source
11008 ASSERT(count.is(rcx)); // rep movs count
11009
11010 // Nothing to do for zero characters.
11011 Label done;
11012 __ testq(count, count);
11013 __ j(zero, &done);
11014
11015 // Make count the number of bytes to copy.
11016 if (!ascii) {
11017 ASSERT_EQ(2, sizeof(uc16)); // NOLINT
11018 __ addq(count, count);
11019 }
11020
11021 // Don't enter the rep movs if there are fewer than 8 bytes to copy.
11022 Label last_bytes;
11023 __ testq(count, Immediate(~7));
11024 __ j(zero, &last_bytes);
11025
11026 // Copy from rsi to rdi using the rep movs instruction.
11027 __ movq(kScratchRegister, count);
11028 __ sar(count, Immediate(3)); // Number of quadwords to copy.
11029 __ repmovsq();
11030
11031 // Find number of bytes left.
11032 __ movq(count, kScratchRegister);
11033 __ and_(count, Immediate(7));
11034
11035 // Check if there are more bytes to copy.
11036 __ bind(&last_bytes);
11037 __ testq(count, count);
11038 __ j(zero, &done);
11039
11040 // Copy remaining characters.
11041 Label loop;
11042 __ bind(&loop);
11043 __ movb(kScratchRegister, Operand(src, 0));
11044 __ movb(Operand(dest, 0), kScratchRegister);
11045 __ addq(src, Immediate(1));
11046 __ addq(dest, Immediate(1));
11047 __ subq(count, Immediate(1));
11048 __ j(not_zero, &loop);
11049
11050 __ bind(&done);
11051}
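
// The same copy strategy in C++: move whole quadwords first (the rep movsq
// part), then finish the remaining 0-7 bytes one at a time. memcpy merely
// stands in for rep movsq and <cstring>/<cstddef> are assumed; this helper is
// an illustrative sketch, not code used by the stub.
static inline void SketchCopyBytes(unsigned char* dest,
                                   const unsigned char* src,
                                   size_t byte_count) {
  size_t quadwords = byte_count >> 3;  // Whole 8-byte chunks.
  memcpy(dest, src, quadwords * 8);    // Corresponds to rep movsq.
  for (size_t i = quadwords * 8; i < byte_count; i++) {
    dest[i] = src[i];                  // Remaining tail bytes.
  }
}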
11052
Steve Block6ded16b2010-05-10 14:33:55 +010011053void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
11054 Register c1,
11055 Register c2,
11056 Register scratch1,
11057 Register scratch2,
11058 Register scratch3,
11059 Register scratch4,
11060 Label* not_found) {
11061 // Register scratch3 is the general scratch register in this function.
11062 Register scratch = scratch3;
11063
11064 // Make sure that both characters are not digits as such strings have a
11065 // different hash algorithm. Don't try to look for these in the symbol table.
11066 Label not_array_index;
11067 __ movq(scratch, c1);
11068 __ subq(scratch, Immediate(static_cast<int>('0')));
11069 __ cmpq(scratch, Immediate(static_cast<int>('9' - '0')));
11070 __ j(above, &not_array_index);
11071 __ movq(scratch, c2);
11072 __ subq(scratch, Immediate(static_cast<int>('0')));
11073 __ cmpq(scratch, Immediate(static_cast<int>('9' - '0')));
11074 __ j(below_equal, not_found);
11075
11076 __ bind(&not_array_index);
11077 // Calculate the two character string hash.
11078 Register hash = scratch1;
11079 GenerateHashInit(masm, hash, c1, scratch);
11080 GenerateHashAddCharacter(masm, hash, c2, scratch);
11081 GenerateHashGetHash(masm, hash, scratch);
11082
11083 // Collect the two characters in a register.
11084 Register chars = c1;
11085 __ shl(c2, Immediate(kBitsPerByte));
11086 __ orl(chars, c2);
11087
11088 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
11089 // hash: hash of two character string.
11090
11091 // Load the symbol table.
11092 Register symbol_table = c2;
11093 __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);
11094
11095 // Calculate capacity mask from the symbol table capacity.
11096 Register mask = scratch2;
11097 __ movq(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
11098 __ SmiToInteger32(mask, mask);
11099 __ decl(mask);
11100
11101 Register undefined = scratch4;
11102 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
11103
11104 // Registers
11105 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
11106 // hash: hash of two character string (32-bit int)
11107 // symbol_table: symbol table
11108 // mask: capacity mask (32-bit int)
11109 // undefined: undefined value
11110 // scratch: -
11111
11112 // Perform a number of probes in the symbol table.
11113 static const int kProbes = 4;
11114 Label found_in_symbol_table;
11115 Label next_probe[kProbes];
11116 for (int i = 0; i < kProbes; i++) {
11117 // Calculate entry in symbol table.
11118 __ movl(scratch, hash);
11119 if (i > 0) {
11120 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i)));
11121 }
11122 __ andl(scratch, mask);
11123
11124 // Load the entry from the symbol table.
11125 Register candidate = scratch; // Scratch register contains candidate.
11126 ASSERT_EQ(1, SymbolTable::kEntrySize);
11127 __ movq(candidate,
11128 FieldOperand(symbol_table,
11129 scratch,
11130 times_pointer_size,
11131 SymbolTable::kElementsStartOffset));
11132
11133 // If entry is undefined no string with this hash can be found.
11134 __ cmpq(candidate, undefined);
11135 __ j(equal, not_found);
11136
11137 // If length is not 2 the string is not a candidate.
11138 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
11139 Smi::FromInt(2));
11140 __ j(not_equal, &next_probe[i]);
11141
11142 // We use kScratchRegister as a temporary register on the assumption that
11143 // JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly.
11144 Register temp = kScratchRegister;
11145
11146 // Check that the candidate is a non-external ascii string.
11147 __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
11148 __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
11149 __ JumpIfInstanceTypeIsNotSequentialAscii(
11150 temp, temp, &next_probe[i]);
11151
11152 // Check if the two characters match.
11153 __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
11154 __ andl(temp, Immediate(0x0000ffff));
11155 __ cmpl(chars, temp);
11156 __ j(equal, &found_in_symbol_table);
11157 __ bind(&next_probe[i]);
11158 }
11159
11160 // No matching 2 character string found by probing.
11161 __ jmp(not_found);
11162
11163 // Scratch register contains result when we fall through to here.
11164 Register result = scratch;
11165 __ bind(&found_in_symbol_table);
11166 if (!result.is(rax)) {
11167 __ movq(rax, result);
11168 }
11169}
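
// The probing loop above in C++ terms: up to four probes, each offset added
// to the hash and masked by the (power of two) capacity, stopping early at an
// undefined entry and accepting only a two character entry whose characters
// match. Everything below, including the quadratic probe offset formula and
// NULL as the undefined sentinel, is an assumed stand-in for the real
// SymbolTable machinery.
static inline unsigned SketchProbeOffset(int probe) {
  return static_cast<unsigned>((probe + probe * probe) >> 1);
}

static inline const char* SketchTwoCharProbe(const char* const* entries,
                                             unsigned capacity,
                                             unsigned hash,
                                             char c1, char c2) {
  const int kProbes = 4;
  unsigned mask = capacity - 1;
  for (int i = 0; i < kProbes; i++) {
    unsigned index = (hash + SketchProbeOffset(i)) & mask;
    const char* candidate = entries[index];
    if (candidate == NULL) return NULL;  // Undefined entry: string not interned.
    if (candidate[0] == c1 && candidate[1] == c2 && candidate[2] == '\0') {
      return candidate;                  // found_in_symbol_table
    }
    // Otherwise continue with the next probe.
  }
  return NULL;  // No probe matched: take the not_found path.
}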
11170
11171
11172void StringHelper::GenerateHashInit(MacroAssembler* masm,
11173 Register hash,
11174 Register character,
11175 Register scratch) {
11176 // hash = character + (character << 10);
11177 __ movl(hash, character);
11178 __ shll(hash, Immediate(10));
11179 __ addl(hash, character);
11180 // hash ^= hash >> 6;
11181 __ movl(scratch, hash);
11182 __ sarl(scratch, Immediate(6));
11183 __ xorl(hash, scratch);
11184}
11185
11186
11187void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
11188 Register hash,
11189 Register character,
11190 Register scratch) {
11191 // hash += character;
11192 __ addl(hash, character);
11193 // hash += hash << 10;
11194 __ movl(scratch, hash);
11195 __ shll(scratch, Immediate(10));
11196 __ addl(hash, scratch);
11197 // hash ^= hash >> 6;
11198 __ movl(scratch, hash);
11199 __ sarl(scratch, Immediate(6));
11200 __ xorl(hash, scratch);
11201}
11202
11203
11204void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
11205 Register hash,
11206 Register scratch) {
11207 // hash += hash << 3;
11208 __ movl(scratch, hash);
11209 __ shll(scratch, Immediate(3));
11210 __ addl(hash, scratch);
11211 // hash ^= hash >> 11;
11212 __ movl(scratch, hash);
11213 __ sarl(scratch, Immediate(11));
11214 __ xorl(hash, scratch);
11215 // hash += hash << 15;
11216 __ movl(scratch, hash);
11217 __ shll(scratch, Immediate(15));
11218 __ addl(hash, scratch);
11219
11220 // if (hash == 0) hash = 27;
11221 Label hash_not_zero;
11222 __ testl(hash, hash);
11223 __ j(not_zero, &hash_not_zero);
11224 __ movl(hash, Immediate(27));
11225 __ bind(&hash_not_zero);
11226}
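
// Taken together, the three helpers above emit a one-at-a-time style hash.
// The sketch below reproduces it in C++; the generated code uses sarl, an
// arithmetic right shift, which the signed cast mirrors, and the helper names
// are illustrative only.
static inline uint32_t SketchSar(uint32_t value, int bits) {
  return static_cast<uint32_t>(static_cast<int32_t>(value) >> bits);
}

static inline uint32_t SketchTwoCharHash(unsigned char c1, unsigned char c2) {
  uint32_t hash = c1 + (static_cast<uint32_t>(c1) << 10);  // GenerateHashInit
  hash ^= SketchSar(hash, 6);
  hash += c2;                                              // GenerateHashAddCharacter
  hash += hash << 10;
  hash ^= SketchSar(hash, 6);
  hash += hash << 3;                                       // GenerateHashGetHash
  hash ^= SketchSar(hash, 11);
  hash += hash << 15;
  if (hash == 0) hash = 27;                                // Zero hash is reserved.
  return hash;
}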
Leon Clarked91b9f72010-01-27 17:25:45 +000011227
11228void SubStringStub::Generate(MacroAssembler* masm) {
11229 Label runtime;
11230
11231 // Stack frame on entry.
11232 // rsp[0]: return address
11233 // rsp[8]: to
11234 // rsp[16]: from
11235 // rsp[24]: string
11236
11237 const int kToOffset = 1 * kPointerSize;
11238 const int kFromOffset = kToOffset + kPointerSize;
11239 const int kStringOffset = kFromOffset + kPointerSize;
11240 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset;
11241
11242 // Make sure first argument is a string.
11243 __ movq(rax, Operand(rsp, kStringOffset));
11244 ASSERT_EQ(0, kSmiTag);
11245 __ testl(rax, Immediate(kSmiTagMask));
11246 __ j(zero, &runtime);
11247 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
11248 __ j(NegateCondition(is_string), &runtime);
11249
11250 // rax: string
11251 // rbx: instance type
11252 // Calculate length of sub string using the smi values.
Steve Block6ded16b2010-05-10 14:33:55 +010011253 Label result_longer_than_two;
Leon Clarked91b9f72010-01-27 17:25:45 +000011254 __ movq(rcx, Operand(rsp, kToOffset));
11255 __ movq(rdx, Operand(rsp, kFromOffset));
11256 __ JumpIfNotBothPositiveSmi(rcx, rdx, &runtime);
11257
11258 __ SmiSub(rcx, rcx, rdx, NULL); // Overflow doesn't happen.
11259 __ j(negative, &runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010011260 // Special handling of sub-strings of length 1 and 2. One-character strings
11261 // are handled in the runtime system (looked up in the single character
11262 // cache). Two-character strings are looked up in the symbol table.
Leon Clarked91b9f72010-01-27 17:25:45 +000011263 __ SmiToInteger32(rcx, rcx);
11264 __ cmpl(rcx, Immediate(2));
Steve Block6ded16b2010-05-10 14:33:55 +010011265 __ j(greater, &result_longer_than_two);
11266 __ j(less, &runtime);
11267
11268 // Sub string of length 2 requested.
11269 // rax: string
11270 // rbx: instance type
11271 // rcx: sub string length (value is 2)
11272 // rdx: from index (smi)
11273 __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);
11274
11275 // Get the two characters forming the sub string.
11276 __ SmiToInteger32(rdx, rdx); // From index is no longer smi.
11277 __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
11278 __ movzxbq(rcx,
11279 FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));
11280
11281 // Try to look up the two-character string in the symbol table.
11282 Label make_two_character_string;
11283 StringHelper::GenerateTwoCharacterSymbolTableProbe(
11284 masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
11285 __ ret(3 * kPointerSize);
11286
11287 __ bind(&make_two_character_string);
11288 // Set up registers for allocating the two-character string.
11289 __ movq(rax, Operand(rsp, kStringOffset));
11290 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
11291 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
11292 __ Set(rcx, 2);
11293
11294 __ bind(&result_longer_than_two);
Leon Clarked91b9f72010-01-27 17:25:45 +000011295
11296 // rax: string
11297 // rbx: instance type
11298 // rcx: result string length
11299 // Check for flat ascii string
11300 Label non_ascii_flat;
Steve Block6ded16b2010-05-10 14:33:55 +010011301 __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);
Leon Clarked91b9f72010-01-27 17:25:45 +000011302
11303 // Allocate the result.
11304 __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);
11305
11306 // rax: result string
11307 // rcx: result string length
11308 __ movq(rdx, rsi); // rsi is used by the following code.
11309 // Locate first character of result.
11310 __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
11311 // Load string argument and locate character of sub string start.
11312 __ movq(rsi, Operand(rsp, kStringOffset));
11313 __ movq(rbx, Operand(rsp, kFromOffset));
11314 {
11315 SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
11316 __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
11317 SeqAsciiString::kHeaderSize - kHeapObjectTag));
11318 }
11319
11320 // rax: result string
11321 // rcx: result length
11322 // rdx: original value of rsi
11323 // rdi: first character of result
11324 // rsi: character of sub string start
Steve Block6ded16b2010-05-10 14:33:55 +010011325 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
Leon Clarked91b9f72010-01-27 17:25:45 +000011326 __ movq(rsi, rdx); // Restore rsi.
11327 __ IncrementCounter(&Counters::sub_string_native, 1);
11328 __ ret(kArgumentsSize);
11329
11330 __ bind(&non_ascii_flat);
11331 // rax: string
11332 // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
11333 // rcx: result string length
11334 // Check for sequential two byte string
11335 __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
11336 __ j(not_equal, &runtime);
11337
11338 // Allocate the result.
11339 __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);
11340
11341 // rax: result string
11342 // rcx: result string length
11343 __ movq(rdx, rsi); // rsi is used by the following code.
11344 // Locate first character of result.
11345 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
11346 // Load string argument and locate character of sub string start.
11347 __ movq(rsi, Operand(rsp, kStringOffset));
11348 __ movq(rbx, Operand(rsp, kFromOffset));
11349 {
11350 SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
11351 __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
11352 SeqAsciiString::kHeaderSize - kHeapObjectTag));
11353 }
11354
11355 // rax: result string
11356 // rcx: result length
11357 // rdx: original value of rsi
11358 // rdi: first character of result
11359 // rsi: character of sub string start
Steve Block6ded16b2010-05-10 14:33:55 +010011360 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
Leon Clarked91b9f72010-01-27 17:25:45 +000011361 __ movq(rsi, rdx); // Restore rsi.
11362 __ IncrementCounter(&Counters::sub_string_native, 1);
11363 __ ret(kArgumentsSize);
11364
11365 // Just jump to runtime to create the sub string.
11366 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010011367 __ TailCallRuntime(Runtime::kSubString, 3, 1);
Leon Clarked91b9f72010-01-27 17:25:45 +000011368}
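
// A minimal C++ sketch of the copy the fast path above performs once a flat
// string and a valid [from, to) range have been established (hypothetical
// helper, not this stub). The real code allocates a sequential ASCII or
// two-byte result and fills it with REP MOVS; lengths 0 and 1 are left to the
// runtime and length 2 probes the two-character symbol table first.
static inline void SubStringCopySketch(const char* src_chars, int from, int to,
                                       char* dest_chars) {
  // Copy to - from characters starting at src_chars + from.
  for (int i = from; i < to; i++) {
    *dest_chars++ = src_chars[i];
  }
}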
11369
Leon Clarkee46be812010-01-19 14:06:41 +000011370
11371void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
11372 Register left,
11373 Register right,
11374 Register scratch1,
11375 Register scratch2,
11376 Register scratch3,
11377 Register scratch4) {
11378 // Ensure that you can always subtract a string length from a non-negative
11379 // number (e.g. another length).
11380 ASSERT(String::kMaxLength < 0x7fffffff);
11381
11382 // Find minimum length and length difference.
Steve Block6ded16b2010-05-10 14:33:55 +010011383 __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
11384 __ movq(scratch4, scratch1);
11385 __ SmiSub(scratch4,
11386 scratch4,
11387 FieldOperand(right, String::kLengthOffset),
11388 NULL);
Leon Clarkee46be812010-01-19 14:06:41 +000011389 // Register scratch4 now holds left.length - right.length.
11390 const Register length_difference = scratch4;
11391 Label left_shorter;
11392 __ j(less, &left_shorter);
11393 // The right string isn't longer than the left one.
11394 // Get the right string's length by subtracting the (non-negative) difference
11395 // from the left string's length.
Steve Block6ded16b2010-05-10 14:33:55 +010011396 __ SmiSub(scratch1, scratch1, length_difference, NULL);
Leon Clarkee46be812010-01-19 14:06:41 +000011397 __ bind(&left_shorter);
11398 // Register scratch1 now holds Min(left.length, right.length).
11399 const Register min_length = scratch1;
11400
11401 Label compare_lengths;
11402 // If min-length is zero, go directly to comparing lengths.
Steve Block6ded16b2010-05-10 14:33:55 +010011403 __ SmiTest(min_length);
Leon Clarkee46be812010-01-19 14:06:41 +000011404 __ j(zero, &compare_lengths);
11405
Steve Block6ded16b2010-05-10 14:33:55 +010011406 __ SmiToInteger32(min_length, min_length);
11407
Leon Clarkee46be812010-01-19 14:06:41 +000011408 // Registers scratch2 and scratch3 are free.
11409 Label result_not_equal;
11410 Label loop;
11411 {
11412 // Check characters 0 .. min_length - 1 in a loop.
11413 // Use scratch3 as loop index, min_length as limit and scratch2
11414 // for computation.
11415 const Register index = scratch3;
11416 __ movl(index, Immediate(0)); // Index into strings.
11417 __ bind(&loop);
11418 // Compare characters.
11419 // TODO(lrn): Could we load more than one character at a time?
11420 __ movb(scratch2, FieldOperand(left,
11421 index,
11422 times_1,
11423 SeqAsciiString::kHeaderSize));
11424 // Increment index and use -1 modifier on next load to give
11425 // the previous load extra time to complete.
11426 __ addl(index, Immediate(1));
11427 __ cmpb(scratch2, FieldOperand(right,
11428 index,
11429 times_1,
11430 SeqAsciiString::kHeaderSize - 1));
11431 __ j(not_equal, &result_not_equal);
11432 __ cmpl(index, min_length);
11433 __ j(not_equal, &loop);
11434 }
11435 // Completed loop without finding different characters.
11436 // Compare lengths (precomputed).
11437 __ bind(&compare_lengths);
Steve Block6ded16b2010-05-10 14:33:55 +010011438 __ SmiTest(length_difference);
Leon Clarkee46be812010-01-19 14:06:41 +000011439 __ j(not_zero, &result_not_equal);
11440
11441 // Result is EQUAL.
11442 __ Move(rax, Smi::FromInt(EQUAL));
Leon Clarkee46be812010-01-19 14:06:41 +000011443 __ ret(2 * kPointerSize);
11444
11445 Label result_greater;
11446 __ bind(&result_not_equal);
11447 // Unequal comparison of left to right, either character or length.
11448 __ j(greater, &result_greater);
11449
11450 // Result is LESS.
11451 __ Move(rax, Smi::FromInt(LESS));
Leon Clarkee46be812010-01-19 14:06:41 +000011452 __ ret(2 * kPointerSize);
11453
11454 // Result is GREATER.
11455 __ bind(&result_greater);
11456 __ Move(rax, Smi::FromInt(GREATER));
Leon Clarkee46be812010-01-19 14:06:41 +000011457 __ ret(2 * kPointerSize);
11458}
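
// A minimal C++ sketch of the comparison implemented above (hypothetical
// helper, not this stub): scan the first min(left.length, right.length)
// characters, then fall back to the precomputed length difference.
static int CompareFlatAsciiSketch(const unsigned char* left, int left_length,
                                  const unsigned char* right,
                                  int right_length) {
  int min_length = left_length < right_length ? left_length : right_length;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) {
      return left[i] < right[i] ? -1 : 1;  // LESS or GREATER.
    }
  }
  int length_difference = left_length - right_length;
  if (length_difference == 0) return 0;    // EQUAL.
  return length_difference < 0 ? -1 : 1;   // LESS or GREATER.
}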
11459
11460
11461void StringCompareStub::Generate(MacroAssembler* masm) {
11462 Label runtime;
11463
11464 // Stack frame on entry.
11465 // rsp[0]: return address
11466 // rsp[8]: right string
11467 // rsp[16]: left string
11468
11469 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left
11470 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right
11471
11472 // Check for identity.
11473 Label not_same;
11474 __ cmpq(rdx, rax);
11475 __ j(not_equal, &not_same);
11476 __ Move(rax, Smi::FromInt(EQUAL));
11477 __ IncrementCounter(&Counters::string_compare_native, 1);
11478 __ ret(2 * kPointerSize);
11479
11480 __ bind(&not_same);
11481
11482 // Check that both are sequential ASCII strings.
11483 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
11484
11485 // Inline comparison of ascii strings.
Leon Clarked91b9f72010-01-27 17:25:45 +000011486 __ IncrementCounter(&Counters::string_compare_native, 1);
Leon Clarkee46be812010-01-19 14:06:41 +000011487 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
11488
11489 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
11490 // tagged as a small integer.
11491 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010011492 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +000011493}
11494
Steve Block3ce2e202009-11-05 08:53:23 +000011495#undef __
11496
11497#define __ masm.
11498
11499#ifdef _WIN64
11500typedef double (*ModuloFunction)(double, double);
11501// Define custom fmod implementation.
11502ModuloFunction CreateModuloFunction() {
11503 size_t actual_size;
11504 byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
11505 &actual_size,
11506 true));
11507 CHECK(buffer);
Steve Blockd0582a62009-12-15 09:54:21 +000011508 Assembler masm(buffer, static_cast<int>(actual_size));
Steve Block3ce2e202009-11-05 08:53:23 +000011509 // Generated code is put into a fixed, unmovable buffer, and not into
11510 // the V8 heap. We can't, and don't, refer to any relocatable addresses
11511 // (e.g. the JavaScript NaN object).
11512
11513 // Windows 64 ABI passes double arguments in xmm0, xmm1 and
11514 // returns result in xmm0.
11515 // Argument backing space is allocated on the stack above
11516 // the return address.
11517
11518 // Compute x mod y.
11519 // Load y and x (use argument backing store as temporary storage).
11520 __ movsd(Operand(rsp, kPointerSize * 2), xmm1);
11521 __ movsd(Operand(rsp, kPointerSize), xmm0);
11522 __ fld_d(Operand(rsp, kPointerSize * 2));
11523 __ fld_d(Operand(rsp, kPointerSize));
11524
11525 // Clear exception flags before operation.
11526 {
11527 Label no_exceptions;
11528 __ fwait();
11529 __ fnstsw_ax();
11530 // Clear if Invalid Operand or Zero Division exceptions are set.
11531 __ testb(rax, Immediate(5));
11532 __ j(zero, &no_exceptions);
11533 __ fnclex();
11534 __ bind(&no_exceptions);
11535 }
11536
11537 // Compute st(0) % st(1)
11538 {
11539 Label partial_remainder_loop;
11540 __ bind(&partial_remainder_loop);
11541 __ fprem();
11542 __ fwait();
11543 __ fnstsw_ax();
11544 __ testl(rax, Immediate(0x400 /* C2 */));
11545 // If C2 is set, computation only has partial result. Loop to
11546 // continue computation.
11547 __ j(not_zero, &partial_remainder_loop);
11548 }
11549
11550 Label valid_result;
11551 Label return_result;
11552 // If Invalid Operand or Zero Division exceptions are set,
11553 // return NaN.
11554 __ testb(rax, Immediate(5));
11555 __ j(zero, &valid_result);
11556 __ fstp(0); // Drop result in st(0).
11557 int64_t kNaNValue = V8_INT64_C(0x7ff8000000000000);
11558 __ movq(rcx, kNaNValue, RelocInfo::NONE);
11559 __ movq(Operand(rsp, kPointerSize), rcx);
11560 __ movsd(xmm0, Operand(rsp, kPointerSize));
11561 __ jmp(&return_result);
11562
11563 // If result is valid, return that.
11564 __ bind(&valid_result);
11565 __ fstp_d(Operand(rsp, kPointerSize));
11566 __ movsd(xmm0, Operand(rsp, kPointerSize));
11567
11568 // Clean up FPU stack and exceptions and return xmm0
11569 __ bind(&return_result);
11570 __ fstp(0); // Unload y.
11571
11572 Label clear_exceptions;
11573 __ testb(rax, Immediate(0x3f /* Any Exception */));
11574 __ j(not_zero, &clear_exceptions);
11575 __ ret(0);
11576 __ bind(&clear_exceptions);
11577 __ fnclex();
11578 __ ret(0);
11579
11580 CodeDesc desc;
11581 masm.GetCode(&desc);
11582 // Call the function from C++.
11583 return FUNCTION_CAST<ModuloFunction>(buffer);
11584}
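
// What the fprem loop above computes is the truncated remainder, i.e. the
// same value the C library fmod returns; the loop exists because fprem may
// deliver only a partial remainder (C2 set) when the exponents of x and y
// differ widely. A minimal sketch of the equivalent computation, assuming
// fmod from <math.h> is visible at this point in the file:
static double ModuloSketch(double x, double y) {
  return fmod(x, y);  // x - trunc(x / y) * y; NaN when y is 0 or x is infinite.
}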
11585
11586#endif
Steve Blocka7e24c12009-10-30 11:49:00 +000011587
Leon Clarkee46be812010-01-19 14:06:41 +000011588
Steve Blocka7e24c12009-10-30 11:49:00 +000011589#undef __
11590
11591} } // namespace v8::internal