// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "codegen-inl.h"
#include "bootstrapper.h"
#include "code-stubs.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
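// The ACCESS_MASM macro (defined in macro-assembler.h) normally expands to
// a plain "masm->", so "__ push(eax)" reads as "masm->push(eax)"; builds
// with code comments enabled may route it through a tracking wrapper
// instead.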

// -------------------------------------------------------------------------
// Platform-specific FrameRegisterState functions.

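// Each entry of registers_ appears to encode one of three cases: kPush
// (the register's value is saved on the actual stack), kIgnore (nothing
// needs saving), or an ebp-relative byte offset at which the value is
// spilled into the frame, possibly tagged with kSyncedFlag when the frame
// copy is already up to date.  Save and Restore below interpret that
// encoding symmetrically.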
void FrameRegisterState::Save(MacroAssembler* masm) const {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void FrameRegisterState::Restore(MacroAssembler* masm) const {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


#undef __
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  frame_state_.Save(masm_);
}


void DeferredCode::RestoreRegisters() {
  frame_state_.Restore(masm_);
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

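// Two flavors of helper bracket calls into the runtime: the virtual-frame
// flavor saves and restores a frame's register snapshot around the call,
// while the stub flavor below wraps the call in an internal frame, which
// (as far as the stack walker is concerned) marks the region as
// non-JavaScript code.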
void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->Save(masm);
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  frame_state_->Restore(masm);
}


void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

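// CodeGenState is an RAII helper: constructing one installs a new state
// (here, the current control destination) on the owning code generator,
// and the destructor pops back to the previous state, as the ASSERT in
// ~CodeGenState checks.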
CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false),
      jit_cookie_((FLAG_mask_constants_with_cookie) ? V8::RandomPrivate() : 0) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  JumpTarget::set_compiling_deferred_code(false);

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ int3();
    }
#endif

    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();

    // Allocate the local context if needed.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and esi agree.
      if (FLAG_debug_code) {
        __ cmp(context.reg(), Operand(esi));
        __ Assert(equal, "Runtime::NewContext should end up in esi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope()->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ mov(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object.  This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->Push(Factory::the_hole_value());
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements).  In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state.  Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body.  In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence.  This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->is_in_loop() ? 1 : 0);
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}

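// Map a variable's Slot to a machine operand: parameters and locals are
// ebp-relative slots in the current frame, while context slots require
// walking the context chain and indexing into the context's fixed array.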
Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // Do not overwrite the context register.
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now.  Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we
      // are always at a function context.  However it is safe to
      // dereference because the function context of a function context is
      // itself.  Before deleting this mov we should try to create a
      // counter-example first, though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}

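// Like the Slot::CONTEXT case of SlotOperand, but additionally checks, for
// every scope on the way out that calls eval, that the context has no
// extension object (an eval could have introduced one); if an extension is
// found, control branches to the slow path.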
Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}

// Emit code to load the value of an expression to the top of the
// frame.  If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}

void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}

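// On ia32 a smi is the 31-bit payload shifted left by one (kSmiTag == 0),
// so adding a register to itself is exactly the smi-tagging operation; it
// also sets the overflow flag when the value does not fit in 31 bits,
// which ConvertInt32ResultToNumber below relies on.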
void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    __ add(value->reg(), Operand(value->reg()));
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}


void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number
    // disagree.
    __ xor_(val, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU.
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    if (!CpuFeatures::IsSupported(SSE2)) {
      // Pop the value from the floating point stack.
      __ fstp(0);
    }
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}

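// As a hypothetical example, an expression like (a ^ (a >> 16)) & 0x7fff
// is side-effect free and contains more than two bit operations, so with
// SSE2 available it qualifies for the untagged int32 path below, with a
// bailout that re-evaluates the expression in normal mode.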
void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int computation,
  // compile that SafeInt32 path, and a bailout path.
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      CpuFeatures::IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;
    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(Factory::false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(Factory::true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(Factory::true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(Factory::false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (eg, the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(Factory::true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(Factory::false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}

void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObjectOperand());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObjectOperand());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObjectOperand());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}

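// In JavaScript, typeof must not throw for undeclared globals ("typeof
// undeclaredName" yields "undefined" rather than a ReferenceError), which
// is why globals get a plain property load here instead of a lookup that
// could raise a reference error.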
void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}

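// Lazy allocation matters for functions with their own 'arguments'
// binding, e.g. (an illustrative case) function f() { var arguments = 42;
// ... }: by the time this code runs the slot may already hold a real
// value, and the hole sentinel below is what distinguishes that case.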
Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to.  This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
  return frame_->Pop();
}

// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code.  Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it.  Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it.  Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}

// ECMA-262, section 9.2, page 30: ToBoolean().  Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_integer32()) {  // Also takes Smi case.
    Comment cmnt(masm_, "ONLY_INTEGER_32");
    if (FLAG_debug_code) {
      Label ok;
      __ AbortIfNotNumber(value.reg());
      __ test(value.reg(), Immediate(kSmiTagMask));
      __ j(zero, &ok);
      __ fldz();
      __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
      __ FCmp();
      __ j(not_zero, &ok);
      __ Abort("Smi was wrapped in HeapNumber in output from bitop");
      __ bind(&ok);
    }
    // In the integer32 case there are no Smis hidden in heap numbers, so we
    // need only test for Smi zero.
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    value.Unuse();
    dest->Split(not_zero);
  } else if (value.is_number()) {
    Comment cmnt(masm_, "ONLY_NUMBER");
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);
    __ fldz();
    __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
    __ FCmp();
    value.Unuse();
    dest->Split(not_zero);
  } else {
    // Fast case checks.
    // 'false' => false.
    __ cmp(value.reg(), Factory::false_value());
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ cmp(value.reg(), Factory::true_value());
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ cmp(value.reg(), Factory::undefined_value());
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ test(temp.reg(), Operand(temp.reg()));
    temp.Unuse();
    dest->Split(not_equal);
  }
}

// Perform or call the specialized stub for a binary operation.  Requires
// the three registers left, right and dst to be distinct and spilled.
// This deferred operation has up to three entry points: the main one calls
// the runtime system, the second is for when the result is a non-Smi, and
// the third is for when at least one of the inputs is non-Smi and we have
// SSE2.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
    ASSERT(!left.is(right));
  }

  virtual void Generate();

  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters()
  // and Exit().
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToAnswerOutOfRange(Condition cond);
  void JumpToConstantRhs(Condition cond, Smi* smi_value);
  Label* NonSmiInputLabel();

 private:
  void GenerateAnswerOutOfRange();
  void GenerateNonSmiInput();

  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
  Label answer_out_of_range_;
  Label non_smi_input_;
  Label constant_rhs_;
  Smi* smi_value_;
};
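// A sketch of how inline code typically uses this deferred class (not a
// verbatim call site; BindExit() and entry_label() come from DeferredCode):
//
//   DeferredInlineBinaryOperation* deferred =
//       new DeferredInlineBinaryOperation(op, dst, left, right,
//                                         left_info, right_info, mode);
//   __ test(operand, Immediate(kSmiTagMask));
//   __ j(not_zero, deferred->NonSmiInputLabel());
//   ... fast smi-only code ...
//   deferred->JumpToAnswerOutOfRange(overflow);
//   deferred->BindExit();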


Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
  if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) {
    return &non_smi_input_;
  } else {
    return entry_label();
  }
}


void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) {
  __ j(cond, &answer_out_of_range_);
}


void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
                                                      Smi* smi_value) {
  smi_value_ = smi_value;
  __ j(cond, &constant_rhs_);
}

void DeferredInlineBinaryOperation::Generate() {
  // Registers are not saved implicitly for this stub, so we should not
  // tread on the registers that were not passed to us.
  if (CpuFeatures::IsSupported(SSE2) &&
      ((op_ == Token::ADD) ||
       (op_ == Token::SUB) ||
       (op_ == Token::MUL) ||
       (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    Exit();


    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  // Register spilling is not done implicitly for this stub.
  // We can't postpone it any more now though.
  SaveRegisters();

  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  RestoreRegisters();
  Exit();

  if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) {
    GenerateNonSmiInput();
  }
  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


void DeferredInlineBinaryOperation::GenerateNonSmiInput() {
  // We know at least one of the inputs was not a Smi.
  // This is a third entry point into the deferred code.
  // We may not overwrite left_ because we want to be able
  // to call the handling code for non-smi answer and it
  // might want to overwrite the heap number in left_.
  ASSERT(!right_.is(dst_));
  ASSERT(!left_.is(dst_));
  ASSERT(!left_.is(right_));
  // This entry point is used for bit ops where the right hand side
  // is a constant Smi and the left hand side is a heap object.  It
  // is also used for bit ops where both sides are unknown, but where
  // at least one of them is a heap object.
  bool rhs_is_constant = constant_rhs_.is_linked();
  // We can't generate code for both cases.
  ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked());

  if (FLAG_debug_code) {
    __ int3();  // We don't fall through into this code.
  }

  __ bind(&non_smi_input_);

  if (rhs_is_constant) {
    __ bind(&constant_rhs_);
    // In this case the input is a heap object and it is in the dst_
    // register.  The left_ and right_ registers have not been initialized
    // yet.
    __ mov(right_, Immediate(smi_value_));
    __ mov(left_, Operand(dst_));
    if (!CpuFeatures::IsSupported(SSE2)) {
      __ jmp(entry_label());
      return;
    } else {
      CpuFeatures::Scope use_sse2(SSE2);
      __ JumpIfNotNumber(dst_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    }
  } else {
    // We know we have SSE2 here because otherwise the label is not linked
    // (see NonSmiInputLabel).
    CpuFeatures::Scope use_sse2(SSE2);
    // Handle the non-constant right hand side situation:
    if (left_info_.IsSmi()) {
      // Right is a heap object.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, dst_, right_info_, entry_label());
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_);
    } else if (right_info_.IsSmi()) {
      // Left is a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    } else {
      // Here we don't know if it's one or both that is a heap object.
      Label only_right_is_heap_object, got_both;
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_, &only_right_is_heap_object);
      // Left was a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_, &got_both);
      // Both were heap objects.
      __ rcl(right_, 1);  // Put tag back.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ jmp(&got_both);
      __ bind(&only_right_is_heap_object);
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ bind(&got_both);
    }
  }
  ASSERT(op_ == Token::BIT_AND ||
         op_ == Token::BIT_OR ||
         op_ == Token::BIT_XOR ||
         right_.is(ecx));
  switch (op_) {
    case Token::BIT_AND: __ and_(dst_, Operand(right_)); break;
    case Token::BIT_OR: __ or_(dst_, Operand(right_)); break;
    case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break;
    case Token::SHR: __ shr_cl(dst_); break;
    case Token::SAR: __ sar_cl(dst_); break;
    case Token::SHL: __ shl_cl(dst_); break;
    default: UNREACHABLE();
  }
  if (op_ == Token::SHR) {
    // Check that the *unsigned* result fits in a smi.  Neither of
    // the two high-order bits can be set:
    //  * 0x80000000: high bit would be lost when smi tagging.
    //  * 0x40000000: this number would convert to negative when smi
    //    tagging.
    __ test(dst_, Immediate(0xc0000000));
    __ j(not_zero, &answer_out_of_range_);
  } else {
    // Check that the *signed* result fits in a smi.
    __ cmp(dst_, 0xc0000000);
    __ j(negative, &answer_out_of_range_);
  }
  __ SmiTag(dst_);
  Exit();
}

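// The push sequence below keeps the untagged 32-bit answer alive across a
// possible GC by splitting it into two values the collector cannot
// misread: the low 31 bits, smi-tagged by the shl, and the high bit, which
// after the rcr is either 0 or 0x80000000 (both look like smis since the
// low bit is clear).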
void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() {
  Label after_alloc_failure2;
  Label allocation_ok;
  __ bind(&after_alloc_failure2);
  // We have to allocate a number, causing a GC, while keeping hold of
  // the answer in dst_.  The answer is not a Smi.  We can't just call the
  // runtime shift function here because we already threw away the inputs.
  __ xor_(left_, Operand(left_));
  __ shl(dst_, 1);  // Put top bit in carry flag and Smi tag the low bits.
  __ rcr(left_, 1);  // Rotate with carry.
  __ push(dst_);  // Smi tagged low 31 bits.
  __ push(left_);  // 0 or 0x80000000, which is Smi tagged in both cases.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  if (!left_.is(eax)) {
    __ mov(left_, eax);
  }
  __ pop(right_);  // High bit.
  __ pop(dst_);  // Low 31 bits.
  __ shr(dst_, 1);  // Put 0 in top bit.
  __ or_(dst_, Operand(right_));
  __ jmp(&allocation_ok);

  // This is the second entry point to the deferred code.  It is used only
  // by the bit operations.
  // The dst_ register has the answer.  It is not Smi tagged.  If mode_ is
  // OVERWRITE_LEFT then left_ must contain either an overwritable heap
  // number or a Smi.
  // Put a heap number pointer in left_.
  __ bind(&answer_out_of_range_);
  SaveRegisters();
  if (mode_ == OVERWRITE_LEFT) {
    __ test(left_, Immediate(kSmiTagMask));
    __ j(not_zero, &allocation_ok);
  }
  // This trashes right_.
  __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
  __ bind(&allocation_ok);
  if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) {
    CpuFeatures::Scope use_sse2(SSE2);
    ASSERT(Token::IsBitOp(op_));
    // Signed conversion.
    __ cvtsi2sd(xmm0, Operand(dst_));
    __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0);
  } else {
    if (op_ == Token::SHR) {
      __ push(Immediate(0));  // High word of unsigned value.
      __ push(dst_);
      __ fild_d(Operand(esp, 0));
      __ Drop(2);
    } else {
      ASSERT(Token::IsBitOp(op_));
      __ push(dst_);
      __ fild_s(Operand(esp, 0));  // Signed conversion.
      __ pop(dst_);
    }
    __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset));
  }
  __ mov(dst_, left_);
  RestoreRegisters();
  Exit();
}

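// Examples of the static type rules below: x & 3 is always a Smi (anding
// with a non-negative Smi constant), x | -4 is always a Smi (oring with a
// negative Smi constant), x >>> 2 leaves at most 30 significant bits and
// so fits in a Smi, while x >>> 0 may produce an unsigned value that only
// fits in a heap number.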
static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
                                  Token::Value op,
                                  const Result& right,
                                  const Result& left) {
  // Set TypeInfo of result according to the operation performed.
  // Rely on the fact that smis have a 31 bit payload on ia32.
  STATIC_ASSERT(kSmiValueSize == 31);
  switch (op) {
    case Token::COMMA:
      return right.type_info();
    case Token::OR:
    case Token::AND:
      // Result type can be either of the two input types.
      return operands_type;
    case Token::BIT_AND: {
      // Anding with positive Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() >= 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
          Smi::cast(*left.handle())->value() >= 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_OR: {
      // Oring with negative Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() < 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
          Smi::cast(*left.handle())->value() < 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_XOR:
      // Result is always a 32 bit integer. Smi property of inputs is preserved.
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SAR:
      if (left.is_smi()) return TypeInfo::Smi();
      // Result is a smi if we shift by a constant >= 1, otherwise an integer32.
      // Shift amount is masked with 0x1F (ECMA standard 11.7.2).
      return (right.is_constant() && right.handle()->IsSmi()
              && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SHR:
      // Result is a smi if we shift by a constant >= 2, an integer32 if
      // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
      if (right.is_constant() && right.handle()->IsSmi()) {
        int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
        if (shift_amount > 1) {
          return TypeInfo::Smi();
        } else if (shift_amount > 0) {
          return TypeInfo::Integer32();
        }
      }
      return TypeInfo::Number();
    case Token::ADD:
      if (operands_type.IsSmi()) {
        // The Integer32 range is big enough to take the sum of any two Smis.
        return TypeInfo::Integer32();
      } else if (operands_type.IsNumber()) {
        return TypeInfo::Number();
      } else if (left.type_info().IsString() || right.type_info().IsString()) {
        return TypeInfo::String();
      } else {
        return TypeInfo::Unknown();
      }
    case Token::SHL:
      return TypeInfo::Integer32();
    case Token::SUB:
      // The Integer32 range is big enough to take the difference of any two
      // Smis.
      return (operands_type.IsSmi()) ?
             TypeInfo::Integer32() :
             TypeInfo::Number();
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      // Result is always a number.
      return TypeInfo::Number();
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return TypeInfo::Unknown();
}

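// Illustrative sketch, not part of the original file: the BIT_AND and
// BIT_OR cases above rest on simple range arguments.  AND-ing with a
// non-negative constant c yields a value in [0, c], and OR-ing with a
// negative constant c yields a value in [c, -1]; both intervals fit in the
// 31-bit smi payload whenever c itself is a smi.  The helper names below
// are ours, for illustration only; this is plain standard C++.
static inline bool AndWithPositiveStaysInSmiRange(int32_t x, int32_t c) {
  // Assumes 0 <= c: every result bit is also set in c, so 0 <= (x & c) <= c.
  int32_t result = x & c;
  return 0 <= result && result <= c;  // Always true.
}

static inline bool OrWithNegativeStaysInSmiRange(int32_t x, int32_t c) {
  // Assumes c < 0: the sign bit of c survives the OR and OR only adds bits,
  // so in two's complement c <= (x | c) <= -1.
  int32_t result = x | c;
  return c <= result && result <= -1;  // Always true.
}
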
void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          StringAddStub stub(NO_STRING_CHECK_LEFT_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        }
      } else if (right_is_string) {
        StringAddStub stub(NO_STRING_CHECK_RIGHT_IN_STUB);
        answer = frame_->CallStub(&stub, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}

Result CodeGenerator::GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub,
                                                      Result* left,
                                                      Result* right) {
  if (stub->ArgsInRegistersSupported()) {
    stub->SetArgsInRegisters();
    return frame_->CallStub(stub, left, right);
  } else {
    frame_->Push(left);
    frame_->Push(right);
    return frame_->CallStub(stub, 2);
  }
}

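// Usage note (ours, not part of the original file): callers never need to
// know which calling convention the helper above picked; both paths leave
// the result in the returned Result.  The pattern, exactly as used in
// GenericBinaryOperation above, is:
//
//   GenericBinaryOpStub stub(op, overwrite_mode, flags, operands_type);
//   Result answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
//
// Passing Result* instead of raw registers lets the virtual frame place the
// operands for whichever convention the stub supports.
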
bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
        if (answer != 0 || (left >= 0 && right >= 0)) {
          answer_object = Smi::FromInt(static_cast<int>(answer));
        }
      }
    }
    break;
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
      int shift_amount = right & 0x1F;
      if (Smi::IsValid(left << shift_amount)) {
        answer_object = Smi::FromInt(left << shift_amount);
      }
      break;
    }
    case Token::SHR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      unsigned_left >>= shift_amount;
      if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
        answer_object = Smi::FromInt(unsigned_left);
      }
      break;
    }
    case Token::SAR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      if (left < 0) {
        // Perform arithmetic shift of a negative number by
        // complementing number, logical shifting, complementing again.
        unsigned_left = ~unsigned_left;
        unsigned_left >>= shift_amount;
        unsigned_left = ~unsigned_left;
      } else {
        unsigned_left >>= shift_amount;
      }
      ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
      answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object == Heap::undefined_value()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}

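// Illustrative sketch, not part of the original file: the SAR case above
// avoids shifting a negative signed value directly, because signed right
// shift of a negative number is implementation-defined in C++.  Complement,
// logical shift, complement produces the same bits as an arithmetic shift
// on two's-complement hardware.  The helper name is ours:
static inline int32_t ArithmeticShiftRight(int32_t value, int shift_amount) {
  uint32_t bits = static_cast<uint32_t>(value);
  if (value < 0) {
    // ~bits is non-negative, the logical shift fills with zeros, and the
    // final ~ turns those zeros back into the sign bits of the result,
    // e.g. ArithmeticShiftRight(-8, 1) == -4.
    return static_cast<int32_t>(~((~bits) >> shift_amount));
  }
  return static_cast<int32_t>(bits >> shift_amount);
}
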
void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
                                               Result* right,
                                               JumpTarget* both_smi) {
  TypeInfo left_info = left->type_info();
  TypeInfo right_info = right->type_info();
  if (left_info.IsDouble() || left_info.IsString() ||
      right_info.IsDouble() || right_info.IsString()) {
    // We know that left and right are not both smi.  Don't do any tests.
    return;
  }

  if (left->reg().is(right->reg())) {
    if (!left_info.IsSmi()) {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), left->reg());
      __ or_(temp.reg(), Operand(right->reg()));
      __ test(temp.reg(), Immediate(kSmiTagMask));
      temp.Unuse();
      both_smi->Branch(zero);
    } else {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    if (!right_info.IsSmi()) {
      __ test(right->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  }
}

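// Illustrative sketch, not part of the original file: the helpers above and
// below test two smi tags with a single TEST instruction.  A smi has tag
// bit 0 equal to zero, so OR-ing the two words sets the low bit iff at
// least one operand is not a smi.  In plain C++ (helper name ours):
static inline bool BothTagBitsClear(uint32_t a, uint32_t b) {
  const uint32_t kTagMask = 1;  // Plays the role of kSmiTagMask on ia32.
  return ((a | b) & kTagMask) == 0;  // True iff both low bits are zero.
}
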
void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  DeferredCode* deferred) {
  JumpIfNotBothSmiUsingTypeInfo(left,
                                right,
                                scratch,
                                left_info,
                                right_info,
                                deferred->entry_label());
}


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  Label* on_not_smi) {
  if (left.is(right)) {
    if (!left_info.IsSmi()) {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left);
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      __ mov(scratch, left);
      __ or_(scratch, Operand(right));
      __ test(scratch, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left);
    if (!right_info.IsSmi()) {
      __ test(right, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  }
}

// Implements a binary operation using a deferred code object and some
// inline code to operate on smis quickly.
Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
                                               Result* left,
                                               Result* right,
                                               OverwriteMode overwrite_mode) {
  // Copy the type info because left and right may be overwritten.
  TypeInfo left_type_info = left->type_info();
  TypeInfo right_type_info = right->type_info();
  Token::Value op = expr->op();
  Result answer;
  // Special handling of div and mod because they use fixed registers.
  if (op == Token::DIV || op == Token::MOD) {
    // We need eax as the quotient register, edx as the remainder
    // register, neither left nor right in eax or edx, and left copied
    // to eax.
    Result quotient;
    Result remainder;
    bool left_is_in_eax = false;
    // Step 1: get eax for quotient.
    if ((left->is_register() && left->reg().is(eax)) ||
        (right->is_register() && right->reg().is(eax))) {
      // One or both is in eax.  Use a fresh non-edx register for
      // them.
      Result fresh = allocator_->Allocate();
      ASSERT(fresh.is_valid());
      if (fresh.reg().is(edx)) {
        remainder = fresh;
        fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
      }
      if (left->is_register() && left->reg().is(eax)) {
        quotient = *left;
        *left = fresh;
        left_is_in_eax = true;
      }
      if (right->is_register() && right->reg().is(eax)) {
        quotient = *right;
        *right = fresh;
      }
      __ mov(fresh.reg(), eax);
    } else {
      // Neither left nor right is in eax.
      quotient = allocator_->Allocate(eax);
    }
    ASSERT(quotient.is_register() && quotient.reg().is(eax));
    ASSERT(!(left->is_register() && left->reg().is(eax)));
    ASSERT(!(right->is_register() && right->reg().is(eax)));

    // Step 2: get edx for remainder if necessary.
    if (!remainder.is_valid()) {
      if ((left->is_register() && left->reg().is(edx)) ||
          (right->is_register() && right->reg().is(edx))) {
        Result fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
        if (left->is_register() && left->reg().is(edx)) {
          remainder = *left;
          *left = fresh;
        }
        if (right->is_register() && right->reg().is(edx)) {
          remainder = *right;
          *right = fresh;
        }
        __ mov(fresh.reg(), edx);
      } else {
        // Neither left nor right is in edx.
        remainder = allocator_->Allocate(edx);
      }
    }
    ASSERT(remainder.is_register() && remainder.reg().is(edx));
    ASSERT(!(left->is_register() && left->reg().is(edx)));
    ASSERT(!(right->is_register() && right->reg().is(edx)));

    left->ToRegister();
    right->ToRegister();
    frame_->Spill(eax);
    frame_->Spill(edx);
    // DeferredInlineBinaryOperation requires all the registers that it is
    // told about to be spilled and distinct.
    Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);

    // Check that left and right are smi tagged.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          (op == Token::DIV) ? eax : edx,
                                          left->reg(),
                                          distinct_right.reg(),
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), edx,
                                  left_type_info, right_type_info, deferred);
    if (!left_is_in_eax) {
      __ mov(eax, left->reg());
    }
    // Sign extend eax into edx:eax.
    __ cdq();
    // Check for 0 divisor.
    __ test(right->reg(), Operand(right->reg()));
    deferred->Branch(zero);
    // Divide edx:eax by the right operand.
    __ idiv(right->reg());

    // Complete the operation.
    if (op == Token::DIV) {
      // Check for negative zero result.  If result is zero, and divisor
      // is negative, return a floating point negative zero.  The
      // virtual frame is unchanged in this block, so local control flow
      // can use a Label rather than a JumpTarget.  If the context of this
      // expression will treat -0 like 0, do not do this test.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(left->reg(), Operand(left->reg()));
        __ j(not_zero, &non_zero_result);
        __ test(right->reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by
      // idiv instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      deferred->Branch(equal);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      deferred->Branch(not_zero);
      // Tag the result and store it in the quotient register.
      __ SmiTag(eax);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = quotient;
    } else {
      ASSERT(op == Token::MOD);
      // Check for a negative zero result.  If the result is zero, and
      // the dividend is negative, return a floating point negative
      // zero.  The frame is unchanged in this block, so local control
      // flow can use a Label rather than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(edx, Operand(edx));
        __ j(not_zero, &non_zero_result, taken);
        __ test(left->reg(), Operand(left->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = remainder;
    }
    ASSERT(answer.is_valid());
    return answer;
  }

  // Special handling of shift operations because they use fixed
  // registers.
  if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
    // Move left out of ecx if necessary.
    if (left->is_register() && left->reg().is(ecx)) {
      *left = allocator_->Allocate();
      ASSERT(left->is_valid());
      __ mov(left->reg(), ecx);
    }
    right->ToRegister(ecx);
    left->ToRegister();
    ASSERT(left->is_register() && !left->reg().is(ecx));
    ASSERT(right->is_register() && right->reg().is(ecx));
    if (left_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    }
    if (right_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
    }

    // We will modify right, it must be spilled.
    frame_->Spill(ecx);
    // DeferredInlineBinaryOperation requires all the registers that it is told
    // about to be spilled and distinct.  We know that right is ecx and left is
    // not ecx.
    frame_->Spill(left->reg());

    // Use a fresh answer register to avoid spilling the left operand.
    answer = allocator_->Allocate();
    ASSERT(answer.is_valid());

    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          answer.reg(),
                                          left->reg(),
                                          ecx,
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());

    // Untag both operands.
    __ mov(answer.reg(), left->reg());
    __ SmiUntag(answer.reg());
    __ SmiUntag(right->reg());  // Right is ecx.

    // Perform the operation.
    ASSERT(right->reg().is(ecx));
    switch (op) {
      case Token::SAR: {
        __ sar_cl(answer.reg());
        if (!left_type_info.IsSmi()) {
          // Check that the *signed* result fits in a smi.
          __ cmp(answer.reg(), 0xc0000000);
          deferred->JumpToAnswerOutOfRange(negative);
        }
        break;
      }
      case Token::SHR: {
        __ shr_cl(answer.reg());
        // Check that the *unsigned* result fits in a smi.  Neither of
        // the two high-order bits can be set:
        //  * 0x80000000: high bit would be lost when smi tagging.
        //  * 0x40000000: this number would convert to negative when smi
        //    tagging.
        // These two cases can only happen with shifts by 0 or 1 when
        // handed a valid smi.  If the answer cannot be represented by a
        // smi, restore the left and right arguments, and jump to slow
        // case.  The low bit of the left argument may be lost, but only
        // in a case where it is dropped anyway.
        __ test(answer.reg(), Immediate(0xc0000000));
        deferred->JumpToAnswerOutOfRange(not_zero);
        break;
      }
      case Token::SHL: {
        __ shl_cl(answer.reg());
        // Check that the *signed* result fits in a smi.
        __ cmp(answer.reg(), 0xc0000000);
        deferred->JumpToAnswerOutOfRange(negative);
        break;
      }
      default:
        UNREACHABLE();
    }
    // Smi-tag the result in answer.
    __ SmiTag(answer.reg());
    deferred->BindExit();
    left->Unuse();
    right->Unuse();
    ASSERT(answer.is_valid());
    return answer;
  }

  // Handle the other binary operations.
  left->ToRegister();
  right->ToRegister();
  // DeferredInlineBinaryOperation requires all the registers that it is told
  // about to be spilled.
  Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);
  // A newly allocated register answer is used to hold the answer.  The
  // registers containing left and right are not modified so they don't
  // need to be spilled in the fast case.
  answer = allocator_->Allocate();
  ASSERT(answer.is_valid());

  // Perform the smi tag check.
  DeferredInlineBinaryOperation* deferred =
      new DeferredInlineBinaryOperation(op,
                                        answer.reg(),
                                        left->reg(),
                                        distinct_right.reg(),
                                        left_type_info,
                                        right_type_info,
                                        overwrite_mode);
  Label non_smi_bit_op;
  if (op != Token::BIT_OR) {
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());
  }

  __ mov(answer.reg(), left->reg());
  switch (op) {
    case Token::ADD:
      __ add(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::SUB:
      __ sub(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::MUL: {
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // Remove smi tag from the left operand (but keep sign).
      // Left-hand operand has been copied into answer.
      __ SmiUntag(answer.reg());
      // Do multiplication of smis, leaving result in answer.
      __ imul(answer.reg(), Operand(right->reg()));
      // Go slow on overflows.
      deferred->Branch(overflow);
      // Check for negative zero result.  If product is zero, and one
      // argument is negative, go to slow case.  The frame is unchanged
      // in this block, so local control flow can use a Label rather
      // than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(answer.reg(), Operand(answer.reg()));
        __ j(not_zero, &non_zero_result, taken);
        __ mov(answer.reg(), left->reg());
        __ or_(answer.reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ xor_(answer.reg(), Operand(answer.reg()));  // Positive 0 is correct.
        __ bind(&non_zero_result);
      }
      break;
    }

    case Token::BIT_OR:
      __ or_(answer.reg(), Operand(right->reg()));
      __ test(answer.reg(), Immediate(kSmiTagMask));
      __ j(not_zero, deferred->NonSmiInputLabel());
      break;

    case Token::BIT_AND:
      __ and_(answer.reg(), Operand(right->reg()));
      break;

    case Token::BIT_XOR:
      __ xor_(answer.reg(), Operand(right->reg()));
      break;

    default:
      UNREACHABLE();
      break;
  }

  deferred->BindExit();
  left->Unuse();
  right->Unuse();
  ASSERT(answer.is_valid());
  return answer;
}

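// Illustrative sketch, not part of the original file: the 0xc0000000 tests
// in the shift cases above all enforce the same invariant.  Smi-tagging is
// a left shift by one, so an untagged value survives tagging only if bits
// 31 and 30 agree (signed results) or are both clear (unsigned results).
// In plain C++ (helper names ours):
static inline bool SignedFitsInSmi(int32_t untagged) {
  // Matches cmp 0xc0000000 / j(negative): true iff untagged is in
  // [-2^30, 2^30), so the tagged value does not overflow 32 bits.
  return untagged >= -0x40000000 && untagged < 0x40000000;
}

static inline bool UnsignedFitsInSmi(uint32_t untagged) {
  // Matches test 0xc0000000 / j(not_zero): bit 31 would be lost by the tag
  // shift and bit 30 would make the tagged value negative.
  return (untagged & 0xc0000000u) == 0;
}
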
// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             TypeInfo type_info,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperation::Generate() {
  // For mod we don't generate all the Smi code inline.
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, src_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}

// Call the appropriate binary operation stub to compute value op src
// and leave the result in dst.
class DeferredInlineSmiOperationReversed: public DeferredCode {
 public:
  DeferredInlineSmiOperationReversed(Token::Value op,
                                     Register dst,
                                     Smi* value,
                                     Register src,
                                     TypeInfo type_info,
                                     OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        type_info_(type_info),
        value_(value),
        src_(src),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperationReversed");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  Register src_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperationReversed::Generate() {
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, value_, src_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}

// The result of src + value is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative addition and call the appropriate
// specialized stub for add.  The result is left in dst.
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAdd::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}

// The result of value + src is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative addition and call the appropriate
// specialized stub for add.  The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               TypeInfo type_info,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAddReversed::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, value_, dst_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}

// The result of src - value is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative subtraction and call the
// appropriate specialized stub for subtract.  The result is left in
// dst.
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiSub::Generate() {
  // Undo the optimistic sub operation and call the shared stub.
  __ add(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::SUB,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}

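// Illustrative sketch, not part of the original file: the deferred classes
// above share one pattern.  The fast path applies the operation
// optimistically (e.g. "add dst, value"); if the overflow or tag check then
// fails, the slow path first undoes it ("sub dst, value") so the stub sees
// the original operand.  The same idea in plain C++ (names ours):
static inline int32_t OptimisticAdd(int32_t dst, int32_t value,
                                    bool* needs_slow_path) {
  int64_t wide = static_cast<int64_t>(dst) + value;  // Cannot overflow.
  if (wide != static_cast<int32_t>(wide)) {
    *needs_slow_path = true;  // Overflowed: hand back the original operand,
    return dst;               // like the "undo the optimistic add" comments.
  }
  *needs_slow_path = false;
  return static_cast<int32_t>(wide);
}
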
Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
                                                 Result* operand,
                                                 Handle<Object> value,
                                                 bool reversed,
                                                 OverwriteMode overwrite_mode) {
  // Generate inline code for a binary operation when one of the
  // operands is a constant smi.  Consumes the argument "operand".
  if (IsUnsafeSmi(value)) {
    Result unsafe_operand(value);
    if (reversed) {
      return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
                                      overwrite_mode);
    } else {
      return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
                                      overwrite_mode);
    }
  }

  // Get the literal value.
  Smi* smi_value = Smi::cast(*value);
  int int_value = smi_value->value();

  Token::Value op = expr->op();
  Result answer;
  switch (op) {
    case Token::ADD: {
      operand->ToRegister();
      frame_->Spill(operand->reg());

      // Optimistically add.  Call the specialized add stub if the
      // result is not a smi or overflows.
      DeferredCode* deferred = NULL;
      if (reversed) {
        deferred = new DeferredInlineSmiAddReversed(operand->reg(),
                                                    operand->type_info(),
                                                    smi_value,
                                                    overwrite_mode);
      } else {
        deferred = new DeferredInlineSmiAdd(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
      }
      __ add(Operand(operand->reg()), Immediate(value));
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred = NULL;
      if (reversed) {
        // The reversed case is only hit when the right operand is not a
        // constant.
        ASSERT(operand->is_register());
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        __ Set(answer.reg(), Immediate(value));
        deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   operand->reg(),
                                                   operand->type_info(),
                                                   overwrite_mode);
        __ sub(answer.reg(), Operand(operand->reg()));
      } else {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        answer = *operand;
        deferred = new DeferredInlineSmiSub(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
        __ sub(Operand(operand->reg()), Immediate(value));
      }
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(answer.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      operand->Unuse();
      break;
    }

    case Token::SAR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        frame_->Spill(operand->reg());
        if (!operand->type_info().IsSmi()) {
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
          deferred->BindExit();
        } else {
          if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
        }
        answer = *operand;
      }
      break;

    case Token::SHR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           answer.reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        if (!operand->type_info().IsSmi()) {
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(operand->reg());
        }
        __ mov(answer.reg(), operand->reg());
        __ SmiUntag(answer.reg());
        __ shr(answer.reg(), shift_value);
        // A negative Smi shifted right by two or more ends up in the
        // positive Smi range, so the range check is only needed for
        // shifts by zero or one.
        if (shift_value < 2) {
          __ test(answer.reg(), Immediate(0xc0000000));
          deferred->Branch(not_zero);
        }
        operand->Unuse();
        __ SmiTag(answer.reg());
        deferred->BindExit();
      }
      break;

    case Token::SHL:
      if (reversed) {
        // Move operand into ecx and also into a second register.
        // If operand is already in a register, take advantage of that.
        // This lets us modify ecx, but still bail out to deferred code.
        Result right;
        Result right_copy_in_ecx;
        TypeInfo right_type_info = operand->type_info();
        operand->ToRegister();
        if (operand->reg().is(ecx)) {
          right = allocator()->Allocate();
          __ mov(right.reg(), ecx);
          frame_->Spill(ecx);
          right_copy_in_ecx = *operand;
        } else {
          right_copy_in_ecx = allocator()->Allocate(ecx);
          __ mov(ecx, operand->reg());
          right = *operand;
        }
        operand->Unuse();

        answer = allocator()->Allocate();
        DeferredInlineSmiOperationReversed* deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   right.reg(),
                                                   right_type_info,
                                                   overwrite_mode);
        __ mov(answer.reg(), Immediate(int_value));
        __ sar(ecx, kSmiTagSize);
        if (!right_type_info.IsSmi()) {
          deferred->Branch(carry);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(right.reg());
        }
        __ shl_cl(answer.reg());
        __ cmp(answer.reg(), 0xc0000000);
        deferred->Branch(sign);
        __ SmiTag(answer.reg());

        deferred->BindExit();
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        if (shift_value == 0) {
          // Spill operand so it can be overwritten in the slow case.
          frame_->Spill(operand->reg());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          deferred->BindExit();
          answer = *operand;
        } else {
          // Use a fresh temporary for nonzero shift values.
          answer = allocator()->Allocate();
          ASSERT(answer.is_valid());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             answer.reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          if (!operand->type_info().IsSmi()) {
            __ test(operand->reg(), Immediate(kSmiTagMask));
            deferred->Branch(not_zero);
          } else if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          __ mov(answer.reg(), operand->reg());
          STATIC_ASSERT(kSmiTag == 0);  // adjust code if not the case
          // We do no shifts, only the Smi conversion, if shift_value is 1.
          if (shift_value > 1) {
            __ shl(answer.reg(), shift_value - 1);
          }
          // Convert int result to Smi, checking that it is in int range.
          STATIC_ASSERT(kSmiTagSize == 1);  // adjust code if not the case
          __ add(answer.reg(), Operand(answer.reg()));
          deferred->Branch(overflow);
          deferred->BindExit();
          operand->Unuse();
        }
      }
      break;

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      operand->ToRegister();
      // DeferredInlineBinaryOperation requires all the registers that it is
      // told about to be spilled.
      frame_->Spill(operand->reg());
      DeferredInlineBinaryOperation* deferred = NULL;
      if (!operand->type_info().IsSmi()) {
        Result left = allocator()->Allocate();
        ASSERT(left.is_valid());
        Result right = allocator()->Allocate();
        ASSERT(right.is_valid());
        deferred = new DeferredInlineBinaryOperation(
            op,
            operand->reg(),
            left.reg(),
            right.reg(),
            operand->type_info(),
            TypeInfo::Smi(),
            overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT);
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->JumpToConstantRhs(not_zero, smi_value);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      if (op == Token::BIT_AND) {
        __ and_(Operand(operand->reg()), Immediate(value));
      } else if (op == Token::BIT_XOR) {
        if (int_value != 0) {
          __ xor_(Operand(operand->reg()), Immediate(value));
        }
      } else {
        ASSERT(op == Token::BIT_OR);
        if (int_value != 0) {
          __ or_(Operand(operand->reg()), Immediate(value));
        }
      }
      if (deferred != NULL) deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::DIV:
      if (!reversed && int_value == 2) {
        operand->ToRegister();
        frame_->Spill(operand->reg());

        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check that lowest log2(value) bits of operand are zero, and test
        // smi tag at the same time.
        STATIC_ASSERT(kSmiTag == 0);
        STATIC_ASSERT(kSmiTagSize == 1);
        __ test(operand->reg(), Immediate(3));
        deferred->Branch(not_zero);  // Branch if non-smi or odd smi.
        __ sar(operand->reg(), 1);
        deferred->BindExit();
        answer = *operand;
      } else {
        // Cannot fall through MOD to default case, so we duplicate the
        // default case here.
        Result constant_operand(value);
        if (reversed) {
          answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                            overwrite_mode);
        } else {
          answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                            overwrite_mode);
        }
      }
      break;

    // Generate inline code for mod of powers of 2 and negative powers of 2.
    case Token::MOD:
      if (!reversed &&
          int_value != 0 &&
          (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check for negative or non-Smi left hand side.
        __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
        deferred->Branch(not_zero);
        if (int_value < 0) int_value = -int_value;
        if (int_value == 1) {
          __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
        } else {
          __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
        }
        deferred->BindExit();
        answer = *operand;
        break;
      }
      // Fall through if we did not find a power of 2 on the right hand side!
      // The next case must be the default.

    default: {
      Result constant_operand(value);
      if (reversed) {
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                          overwrite_mode);
      }
      break;
    }
  }
  ASSERT(answer.is_valid());
  return answer;
}

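// Illustrative sketch, not part of the original file: the MOD fast path
// above works directly on the tagged value.  For a non-negative smi x and
// a power of two m, x % m == x & (m - 1); with a one-bit tag the mask
// becomes (m << kSmiTagSize) - 1, which is harmless because the tag bit of
// x is zero anyway.  In plain C++ (names ours):
static inline int32_t TaggedModPowerOfTwo(int32_t tagged_x, int32_t m) {
  // Assumes tagged_x is a non-negative smi (tag bit 0 == 0) and m is a
  // power of two, as guaranteed by the checks in the code above.
  const int kTagSize = 1;  // Mirrors kSmiTagSize on ia32.
  return tagged_x & ((m << kTagSize) - 1);  // Still a valid tagged smi.
}
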
static bool CouldBeNaN(const Result& result) {
  if (result.type_info().IsSmi()) return false;
  if (result.type_info().IsInteger32()) return false;
  if (!result.is_constant()) return true;
  if (!result.handle()->IsHeapNumber()) return false;
  return isnan(HeapNumber::cast(*result.handle())->value());
}

// Convert from signed to unsigned comparison to match the way EFLAGS are set
// by FPU and XMM compare instructions.
static Condition DoubleCondition(Condition cc) {
  switch (cc) {
    case less: return below;
    case equal: return equal;
    case less_equal: return below_equal;
    case greater: return above;
    case greater_equal: return above_equal;
    default: UNREACHABLE();
  }
  UNREACHABLE();
  return equal;
}

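// Illustrative note, not part of the original file: FPU and XMM compares
// (fucomi/ucomisd) set CF and ZF the way *unsigned* integer compares do,
// which is why a signed condition like 'less' must be tested with 'below'.
// The mapping above can be read as flag predicates:
//   below       <=> CF == 1             (a < b)
//   equal       <=> ZF == 1             (a == b)
//   below_equal <=> CF == 1 || ZF == 1  (a <= b)
//   above       <=> CF == 0 && ZF == 0  (a > b)
// Unordered operands (a NaN input) set ZF, CF and PF all to one, which is
// why callers must separately rule out NaN, as CouldBeNaN above helps do.
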
static CompareFlags ComputeCompareFlags(NaNInformation nan_info,
                                        bool inline_number_compare) {
  CompareFlags flags = NO_SMI_COMPARE_IN_STUB;
  if (nan_info == kCantBothBeNaN) {
    flags = static_cast<CompareFlags>(flags | CANT_BOTH_BE_NAN);
  }
  if (inline_number_compare) {
    flags = static_cast<CompareFlags>(flags | NO_NUMBER_COMPARE_IN_STUB);
  }
  return flags;
}

Leon Clarkee46be812010-01-19 14:06:41 +00002661void CodeGenerator::Comparison(AstNode* node,
2662 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00002663 bool strict,
2664 ControlDestination* dest) {
2665 // Strict only makes sense for equality comparisons.
2666 ASSERT(!strict || cc == equal);
2667
2668 Result left_side;
2669 Result right_side;
2670 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
  if (cc == greater || cc == less_equal) {
    cc = ReverseCondition(cc);
    left_side = frame_->Pop();
    right_side = frame_->Pop();
  } else {
    right_side = frame_->Pop();
    left_side = frame_->Pop();
  }
  ASSERT(cc == less || cc == equal || cc == greater_equal);

  // If either side is a constant smi, optimize the comparison.
  bool left_side_constant_smi = false;
  bool left_side_constant_null = false;
  bool left_side_constant_1_char_string = false;
  if (left_side.is_constant()) {
    left_side_constant_smi = left_side.handle()->IsSmi();
    left_side_constant_null = left_side.handle()->IsNull();
    left_side_constant_1_char_string =
        (left_side.handle()->IsString() &&
         String::cast(*left_side.handle())->length() == 1 &&
         String::cast(*left_side.handle())->IsAsciiRepresentation());
  }
  bool right_side_constant_smi = false;
  bool right_side_constant_null = false;
  bool right_side_constant_1_char_string = false;
  if (right_side.is_constant()) {
    right_side_constant_smi = right_side.handle()->IsSmi();
    right_side_constant_null = right_side.handle()->IsNull();
    right_side_constant_1_char_string =
        (right_side.handle()->IsString() &&
         String::cast(*right_side.handle())->length() == 1 &&
         String::cast(*right_side.handle())->IsAsciiRepresentation());
  }

  if (left_side_constant_smi || right_side_constant_smi) {
    bool is_loop_condition = (node->AsExpression() != NULL) &&
        node->AsExpression()->is_loop_condition();
    ConstantSmiComparison(cc, strict, dest, &left_side, &right_side,
                          left_side_constant_smi, right_side_constant_smi,
                          is_loop_condition);
  } else if (left_side_constant_1_char_string ||
             right_side_constant_1_char_string) {
    if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
      // Trivial case, comparing two constants.
      int left_value = String::cast(*left_side.handle())->Get(0);
      int right_value = String::cast(*right_side.handle())->Get(0);
      switch (cc) {
        case less:
          dest->Goto(left_value < right_value);
          break;
        case equal:
          dest->Goto(left_value == right_value);
          break;
        case greater_equal:
          dest->Goto(left_value >= right_value);
          break;
        default:
          UNREACHABLE();
      }
    } else {
      // Only one side is a constant 1-character string.
      // If left side is a constant 1-character string, reverse the operands.
      // Since one side is a constant string, conversion order does not matter.
      if (left_side_constant_1_char_string) {
        Result temp = left_side;
        left_side = right_side;
        right_side = temp;
        cc = ReverseCondition(cc);
        // This may reintroduce greater or less_equal as the value of cc.
        // CompareStub and the inline code both support all values of cc.
      }
      // Implement comparison against a constant string, inlining the case
      // where both sides are strings.
      left_side.ToRegister();

      // Here we split control flow to the stub call and inlined cases
      // before finally splitting it to the control destination. We use
      // a jump target and branching to duplicate the virtual frame at
      // the first split. We manually handle the off-frame references
      // by reconstituting them on the non-fall-through path.
      JumpTarget is_not_string, is_string;
      Register left_reg = left_side.reg();
      Handle<Object> right_val = right_side.handle();
      ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
      __ test(left_side.reg(), Immediate(kSmiTagMask));
      is_not_string.Branch(zero, &left_side);
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(),
             FieldOperand(left_side.reg(), HeapObject::kMapOffset));
      __ movzx_b(temp.reg(),
                 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
      // If we are testing for equality then make use of the symbol shortcut.
      // Check if the right hand side has the same type as the left hand
      // side (which is always a symbol).
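      // Symbols are canonicalized, so two symbols consist of the same
      // characters exactly when they are the same object; equality then
      // reduces to an identity compare without touching the characters.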
      if (cc == equal) {
        Label not_a_symbol;
        STATIC_ASSERT(kSymbolTag != 0);
        // Ensure that no non-strings have the symbol bit set.
        STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
        __ test(temp.reg(), Immediate(kIsSymbolMask));  // Test the symbol bit.
        __ j(zero, &not_a_symbol);
        // They are symbols, so do identity compare.
        __ cmp(left_side.reg(), right_side.handle());
        dest->true_target()->Branch(equal);
        dest->false_target()->Branch(not_equal);
        __ bind(&not_a_symbol);
      }
      // Call the compare stub if the left side is not a flat ASCII string.
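      // The instance type byte packs string-ness, representation, and
      // encoding into disjoint bit fields, so a single masked compare
      // establishes all three facts at once: the object is a string, it is
      // sequential (flat), and it is ASCII.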
      __ and_(temp.reg(),
          kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
      __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
      temp.Unuse();
      is_string.Branch(equal, &left_side);

      // Set up and call the compare stub.
      is_not_string.Bind(&left_side);
      CompareFlags flags =
          static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_COMPARE_IN_STUB);
      CompareStub stub(cc, strict, flags);
      Result result = frame_->CallStub(&stub, &left_side, &right_side);
      result.ToRegister();
      __ cmp(result.reg(), 0);
      result.Unuse();
      dest->true_target()->Branch(cc);
      dest->false_target()->Jump();

      is_string.Bind(&left_side);
      // left_side is a sequential ASCII string.
      left_side = Result(left_reg);
      right_side = Result(right_val);
      // Test string equality and comparison.
      Label comparison_done;
      if (cc == equal) {
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
        __ j(not_equal, &comparison_done);
        uint8_t char_value =
            static_cast<uint8_t>(String::cast(*right_val)->Get(0));
        __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
                char_value);
      } else {
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
        // If the length is 0 then the jump is taken and the flags
        // correctly represent being less than the one-character string.
        __ j(below, &comparison_done);
        // Compare the first character of the string with the
        // constant 1-character string.
        uint8_t char_value =
            static_cast<uint8_t>(String::cast(*right_val)->Get(0));
        __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
                char_value);
        __ j(not_equal, &comparison_done);
        // If the first character is the same then the long string sorts after
        // the short one.
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
      }
      __ bind(&comparison_done);
      left_side.Unuse();
      right_side.Unuse();
      dest->Split(cc);
    }
  } else {
    // Neither side is a constant Smi, constant 1-char string, or constant
    // null. If either side is a non-smi constant, or known to be a heap
    // number, skip the smi check.
    bool known_non_smi =
        (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
        (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
        left_side.type_info().IsDouble() ||
        right_side.type_info().IsDouble();

    NaNInformation nan_info =
        (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
        kBothCouldBeNaN :
        kCantBothBeNaN;

    // Inline a number comparison, handling any combination of smis and heap
    // numbers, if:
    //   the code is in a loop,
    //   the compare operation is different from equal, and
    //   the comparison is not a for-loop condition.
    // The reason for excluding equal is that equality will most likely be
    // performed on smis (not heap numbers), and the code for comparing smis
    // is inlined separately. The same reasoning applies to for-loop
    // conditions, which will also most likely be smi comparisons.
    bool is_loop_condition = (node->AsExpression() != NULL)
        && node->AsExpression()->is_loop_condition();
    bool inline_number_compare =
        loop_nesting() > 0 && cc != equal && !is_loop_condition;

    // Left and right needed in registers for the following code.
    left_side.ToRegister();
    right_side.ToRegister();

    if (known_non_smi) {
      // Inlined equality check:
      // If at least one of the objects is not NaN, then if the objects
      // are identical, they are equal.
      if (nan_info == kCantBothBeNaN && cc == equal) {
        __ cmp(left_side.reg(), Operand(right_side.reg()));
        dest->true_target()->Branch(equal);
      }

      // Inlined number comparison:
      if (inline_number_compare) {
        GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
      }

      // End of in-line compare, call out to the compare stub. Don't include
      // number comparison in the stub if it was inlined.
      CompareFlags flags = ComputeCompareFlags(nan_info, inline_number_compare);
      CompareStub stub(cc, strict, flags);
      Result answer = frame_->CallStub(&stub, &left_side, &right_side);
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      dest->Split(cc);
    } else {
      // Here we split control flow to the stub call and inlined cases
      // before finally splitting it to the control destination. We use
      // a jump target and branching to duplicate the virtual frame at
      // the first split. We manually handle the off-frame references
      // by reconstituting them on the non-fall-through path.
      JumpTarget is_smi;
      Register left_reg = left_side.reg();
      Register right_reg = right_side.reg();

      // In-line check for comparing two smis.
      JumpIfBothSmiUsingTypeInfo(&left_side, &right_side, &is_smi);
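      // If the type info proves both operands are smis, the jump above is
      // unconditional and leaves no fall-through frame, hence the
      // has_valid_frame() guard around the non-smi path below.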

      if (has_valid_frame()) {
        // Inline the equality check if both operands can't be a NaN. If both
        // objects are the same they are equal.
        if (nan_info == kCantBothBeNaN && cc == equal) {
          __ cmp(left_side.reg(), Operand(right_side.reg()));
          dest->true_target()->Branch(equal);
        }

        // Inlined number comparison:
        if (inline_number_compare) {
          GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
        }

        // End of in-line compare, call out to the compare stub. Don't include
        // number comparison in the stub if it was inlined.
        CompareFlags flags =
            ComputeCompareFlags(nan_info, inline_number_compare);
        CompareStub stub(cc, strict, flags);
        Result answer = frame_->CallStub(&stub, &left_side, &right_side);
        __ test(answer.reg(), Operand(answer.reg()));
        answer.Unuse();
        if (is_smi.is_linked()) {
          dest->true_target()->Branch(cc);
          dest->false_target()->Jump();
        } else {
          dest->Split(cc);
        }
      }

      if (is_smi.is_linked()) {
        is_smi.Bind();
        left_side = Result(left_reg);
        right_side = Result(right_reg);
        __ cmp(left_side.reg(), Operand(right_side.reg()));
        right_side.Unuse();
        left_side.Unuse();
        dest->Split(cc);
      }
    }
  }
}


void CodeGenerator::ConstantSmiComparison(Condition cc,
                                          bool strict,
                                          ControlDestination* dest,
                                          Result* left_side,
                                          Result* right_side,
                                          bool left_side_constant_smi,
                                          bool right_side_constant_smi,
                                          bool is_loop_condition) {
  if (left_side_constant_smi && right_side_constant_smi) {
    // Trivial case, comparing two constants.
    int left_value = Smi::cast(*left_side->handle())->value();
    int right_value = Smi::cast(*right_side->handle())->value();
    switch (cc) {
      case less:
        dest->Goto(left_value < right_value);
        break;
      case equal:
        dest->Goto(left_value == right_value);
        break;
      case greater_equal:
        dest->Goto(left_value >= right_value);
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // Only one side is a constant Smi.
    // If left side is a constant Smi, reverse the operands.
    // Since one side is a constant Smi, conversion order does not matter.
    if (left_side_constant_smi) {
      Result* temp = left_side;
      left_side = right_side;
      right_side = temp;
      cc = ReverseCondition(cc);
      // This may re-introduce greater or less_equal as the value of cc.
      // CompareStub and the inline code both support all values of cc.
    }
    // Implement comparison against a constant Smi, inlining the case
    // where both sides are Smis.
    left_side->ToRegister();
    Register left_reg = left_side->reg();
    Handle<Object> right_val = right_side->handle();

    if (left_side->is_smi()) {
      if (FLAG_debug_code) {
        __ AbortIfNotSmi(left_reg);
      }
      // Test smi equality and comparison by signed int comparison.
      if (IsUnsafeSmi(right_side->handle())) {
        right_side->ToRegister();
        __ cmp(left_reg, Operand(right_side->reg()));
      } else {
        __ cmp(Operand(left_reg), Immediate(right_side->handle()));
      }
      left_side->Unuse();
      right_side->Unuse();
      dest->Split(cc);
    } else {
      // Only the case where the left side could possibly be a non-smi is left.
      JumpTarget is_smi;
      if (cc == equal) {
        // We can do the equality comparison before the smi check.
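        // This is safe even if left_reg holds a heap object pointer: a
        // pointer carries a nonzero tag in the low bits where a smi has
        // zeros, so it can never compare equal to a smi immediate.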
        __ cmp(Operand(left_reg), Immediate(right_side->handle()));
        dest->true_target()->Branch(equal);
        __ test(left_reg, Immediate(kSmiTagMask));
        dest->false_target()->Branch(zero);
      } else {
        // Do the smi check, then the comparison.
        __ test(left_reg, Immediate(kSmiTagMask));
        is_smi.Branch(zero, left_side, right_side);
      }

      // Jump or fall through to here if we are comparing a non-smi to a
      // constant smi. If the non-smi is a heap number and this is not
      // a loop condition, inline the floating point code.
      if (!is_loop_condition && CpuFeatures::IsSupported(SSE2)) {
        // Right side is a constant smi and left side has been checked
        // not to be a smi.
        CpuFeatures::Scope use_sse2(SSE2);
        JumpTarget not_number;
        __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
               Immediate(Factory::heap_number_map()));
        not_number.Branch(not_equal, left_side);
        __ movdbl(xmm1,
                  FieldOperand(left_reg, HeapNumber::kValueOffset));
        int value = Smi::cast(*right_val)->value();
        if (value == 0) {
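          // xorpd of a register with itself yields +0.0 without loading a
          // constant from memory.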
          __ xorpd(xmm0, xmm0);
        } else {
          Result temp = allocator()->Allocate();
          __ mov(temp.reg(), Immediate(value));
          __ cvtsi2sd(xmm0, Operand(temp.reg()));
          temp.Unuse();
        }
        __ ucomisd(xmm1, xmm0);
        // Jump to builtin for NaN.
        not_number.Branch(parity_even, left_side);
        left_side->Unuse();
        dest->true_target()->Branch(DoubleCondition(cc));
        dest->false_target()->Jump();
        not_number.Bind(left_side);
      }

      // Set up and call the compare stub.
      CompareFlags flags =
          static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_COMPARE_IN_STUB);
      CompareStub stub(cc, strict, flags);
      Result result = frame_->CallStub(&stub, left_side, right_side);
      result.ToRegister();
      __ test(result.reg(), Operand(result.reg()));
      result.Unuse();
      if (cc == equal) {
        dest->Split(cc);
      } else {
        dest->true_target()->Branch(cc);
        dest->false_target()->Jump();

        // It is important for performance that this case be at the end.
        is_smi.Bind(left_side, right_side);
        if (IsUnsafeSmi(right_side->handle())) {
          right_side->ToRegister();
          __ cmp(left_reg, Operand(right_side->reg()));
        } else {
          __ cmp(Operand(left_reg), Immediate(right_side->handle()));
        }
        left_side->Unuse();
        right_side->Unuse();
        dest->Split(cc);
      }
    }
  }
}


// Check that the comparison operand is a number. Jump to the not_numbers
// jump target, passing the left and right result, if the operand is not a
// number.
static void CheckComparisonOperand(MacroAssembler* masm_,
                                   Result* operand,
                                   Result* left_side,
                                   Result* right_side,
                                   JumpTarget* not_numbers) {
  // Perform the check only if the operand is not known to be a number.
  if (!operand->type_info().IsNumber()) {
    Label done;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &done);
    __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
           Immediate(Factory::heap_number_map()));
    not_numbers->Branch(not_equal, left_side, right_side, not_taken);
    __ bind(&done);
  }
}


// Load a comparison operand onto the FPU stack. This assumes that the
// operand has already been checked and is a number.
static void LoadComparisonOperand(MacroAssembler* masm_,
                                  Result* operand) {
  Label done;
  if (operand->type_info().IsDouble()) {
    // Operand is known to be a heap number, just load it.
    __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
  } else if (operand->type_info().IsSmi()) {
    // Operand is known to be a smi. Convert it to double and keep the
    // original smi.
    __ SmiUntag(operand->reg());
    __ push(operand->reg());
    __ fild_s(Operand(esp, 0));
    __ pop(operand->reg());
    __ SmiTag(operand->reg());
  } else {
    // Operand type not known: check for smi, otherwise assume heap number.
    Label smi;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &smi);
    __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
    __ jmp(&done);
    __ bind(&smi);
    __ SmiUntag(operand->reg());
    __ push(operand->reg());
    __ fild_s(Operand(esp, 0));
    __ pop(operand->reg());
    __ SmiTag(operand->reg());
    __ jmp(&done);
  }
  __ bind(&done);
}


// Load a comparison operand into an XMM register. Jump to the not_numbers
// jump target, passing the left and right result, if the operand is not a
// number.
static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
                                      Result* operand,
                                      XMMRegister xmm_reg,
                                      Result* left_side,
                                      Result* right_side,
                                      JumpTarget* not_numbers) {
  Label done;
  if (operand->type_info().IsDouble()) {
    // Operand is known to be a heap number, just load it.
    __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
  } else if (operand->type_info().IsSmi()) {
    // Operand is known to be a smi. Convert it to double and keep the
    // original smi.
    __ SmiUntag(operand->reg());
    __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
    __ SmiTag(operand->reg());
  } else {
    // Operand type not known, check for smi or heap number.
    Label smi;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &smi);
    if (!operand->type_info().IsNumber()) {
      __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
             Immediate(Factory::heap_number_map()));
      not_numbers->Branch(not_equal, left_side, right_side, taken);
    }
    __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
    __ jmp(&done);

    __ bind(&smi);
    // Convert the smi to a double and keep the original smi.
    __ SmiUntag(operand->reg());
    __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
    __ SmiTag(operand->reg());
    __ jmp(&done);
  }
  __ bind(&done);
}


void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
                                                   Result* right_side,
                                                   Condition cc,
                                                   ControlDestination* dest) {
  ASSERT(left_side->is_register());
  ASSERT(right_side->is_register());

  JumpTarget not_numbers;
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(SSE2);

    // Load left and right operand into registers xmm0 and xmm1 and compare.
    LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
                              &not_numbers);
    LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
                              &not_numbers);
    __ ucomisd(xmm0, xmm1);
  } else {
    Label check_right, compare;

    // Make sure that both comparison operands are numbers.
    CheckComparisonOperand(masm_, left_side, left_side, right_side,
                           &not_numbers);
    CheckComparisonOperand(masm_, right_side, left_side, right_side,
                           &not_numbers);

    // Load right and left operand onto the FPU stack and compare.
    LoadComparisonOperand(masm_, right_side);
    LoadComparisonOperand(masm_, left_side);
    __ FCmp();
  }

  // Bail out if a NaN is involved.
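  // Both ucomisd and FCmp set the parity flag when the comparison was
  // unordered, i.e. when at least one operand is NaN.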
  not_numbers.Branch(parity_even, left_side, right_side, not_taken);

  // Split to destination targets based on comparison.
  left_side->Unuse();
  right_side->Unuse();
  dest->true_target()->Branch(DoubleCondition(cc));
  dest->false_target()->Jump();

  not_numbers.Bind(left_side, right_side);
}


// Call the function just below TOS on the stack with the given
// arguments. The receiver is the TOS.
void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                      CallFunctionFlags flags,
                                      int position) {
  // Push the arguments ("left-to-right") on the stack.
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
    frame_->SpillTop();
  }

  // Record the position for debugging purposes.
  CodeForSourcePosition(position);

  // Use the shared code stub to call the function.
  InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
  CallFunctionStub call_function(arg_count, in_loop, flags);
  Result answer = frame_->CallStub(&call_function, arg_count + 1);
  // Restore context and replace function on the stack with the
  // result of the stub invocation.
  frame_->RestoreContextRegister();
  frame_->SetElementAt(0, &answer);
}


void CodeGenerator::CallApplyLazy(Expression* applicand,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).
  // If the arguments object of the scope has not been allocated,
  // and x.apply is Function.prototype.apply, this optimization
  // just copies y and the arguments of the current function on the
  // stack, as receiver and arguments, and calls x.
  // In the implementation comments, we call x the applicand
  // and y the receiver.
  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  // Load applicand.apply onto the stack. This will usually
  // give us a megamorphic load site. Not super, but it works.
  Load(applicand);
  frame()->Dup();
  Handle<String> name = Factory::LookupAsciiSymbol("apply");
  frame()->Push(name);
  Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
  __ nop();
  frame()->Push(&answer);

  // Load the receiver and the existing arguments object onto the
  // expression stack. Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);
  // Contents of frame at this point:
  // Frame[0]: arguments object of the current function or the hole.
  // Frame[1]: receiver
  // Frame[2]: applicand.apply
  // Frame[3]: applicand.

  // Check if the arguments object has been lazily allocated
  // already. If so, just use that instead of copying the arguments
  // from the stack. This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  frame_->Dup();
  Result probe = frame_->Pop();
  { VirtualFrame::SpilledScope spilled_scope;
    Label slow, done;
    bool try_lazy = true;
    if (probe.is_constant()) {
      try_lazy = probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      __ j(not_equal, &slow);
    }

    if (try_lazy) {
      Label build_args;
      // Get rid of the arguments object probe.
      frame_->Drop();  // Can be called on a spilled frame.
      // Stack now has 3 elements on it.
      // Contents of stack at this point:
      // esp[0]: receiver
      // esp[1]: applicand.apply
      // esp[2]: applicand.

      // Check that the receiver really is a JavaScript object.
      __ mov(eax, Operand(esp, 0));
      __ test(eax, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      // We allow all JSObjects including JSFunctions. As long as
      // JS_FUNCTION_TYPE is the last instance type and it is right
      // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
      // bound.
      STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
      STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
      __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
      __ j(below, &build_args);

      // Check that applicand.apply is Function.prototype.apply.
      __ mov(eax, Operand(esp, kPointerSize));
      __ test(eax, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
      __ j(not_equal, &build_args);
      __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
      __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
      Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
      __ cmp(Operand(ecx), Immediate(apply_code));
      __ j(not_equal, &build_args);

      // Check that applicand is a function.
      __ mov(edi, Operand(esp, 2 * kPointerSize));
      __ test(edi, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
      __ j(not_equal, &build_args);

      // Copy the arguments to this function possibly from the
      // adaptor frame below it.
      Label invoke, adapted;
      __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
      __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
      __ cmp(Operand(ecx),
             Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
      __ j(equal, &adapted);

      // No arguments adaptor frame. Copy fixed number of arguments.
      __ mov(eax, Immediate(scope()->num_parameters()));
      for (int i = 0; i < scope()->num_parameters(); i++) {
        __ push(frame_->ParameterAt(i));
      }
      __ jmp(&invoke);

      // Arguments adaptor frame present. Copy arguments from there, but
      // cap the number copied to avoid stack overflows.
      __ bind(&adapted);
      static const uint32_t kArgumentsLimit = 1 * KB;
      __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
      __ SmiUntag(eax);
      __ mov(ecx, Operand(eax));
      __ cmp(eax, kArgumentsLimit);
      __ j(above, &build_args);

      // Loop through the arguments pushing them onto the execution
      // stack. We don't inform the virtual frame of the push, so we don't
      // have to worry about getting rid of the elements from the virtual
      // frame.
      Label loop;
      // ecx is a small non-negative integer, due to the test above.
      __ test(ecx, Operand(ecx));
      __ j(zero, &invoke);
      __ bind(&loop);
      __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
      __ dec(ecx);
      __ j(not_zero, &loop);

      // Invoke the function.
      __ bind(&invoke);
      ParameterCount actual(eax);
      __ InvokeFunction(edi, actual, CALL_FUNCTION);
      // Drop applicand.apply and applicand from the stack, and push
      // the result of the function call, but leave the spilled frame
      // unchanged, with 3 elements, so it is correct when we compile the
      // slow-case code.
      __ add(Operand(esp), Immediate(2 * kPointerSize));
      __ push(eax);
      // Stack now has 1 element:
      // esp[0]: result
      __ jmp(&done);

      // Slow-case: Allocate the arguments object since we know it isn't
      // there, and fall through to the slow case where we call
      // applicand.apply.
      __ bind(&build_args);
      // Stack now has 3 elements, because we jumped here from code where:
      // esp[0]: receiver
      // esp[1]: applicand.apply
      // esp[2]: applicand.

      // StoreArgumentsObject requires a correct frame, and may modify it.
      Result arguments_object = StoreArgumentsObject(false);
      frame_->SpillAll();
      arguments_object.ToRegister();
      frame_->EmitPush(arguments_object.reg());
      arguments_object.Unuse();
      // Stack and frame now have 4 elements.
      __ bind(&slow);
    }

    // Generic computation of x.apply(y, args) with no special optimization.
    // Flip applicand.apply and applicand on the stack, so
    // applicand looks like the receiver of the applicand.apply call.
    // Then process it as a normal function call.
    __ mov(eax, Operand(esp, 3 * kPointerSize));
    __ mov(ebx, Operand(esp, 2 * kPointerSize));
    __ mov(Operand(esp, 2 * kPointerSize), eax);
    __ mov(Operand(esp, 3 * kPointerSize), ebx);

    CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
    Result res = frame_->CallStub(&call_function, 3);
    // The function and its two arguments have been dropped.
    frame_->Drop(1);  // Drop the receiver as well.
    res.ToRegister();
    frame_->EmitPush(res.reg());
    // Stack now has 1 element:
    // esp[0]: result
    if (try_lazy) __ bind(&done);
  }  // End of spilled scope.
  // Restore the context register after a call.
  frame_->RestoreContextRegister();
}


class DeferredStackCheck: public DeferredCode {
 public:
  DeferredStackCheck() {
    set_comment("[ DeferredStackCheck");
  }

  virtual void Generate();
};


void DeferredStackCheck::Generate() {
  StackCheckStub stub;
  __ CallStub(&stub);
}

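// Emit an inline stack check. The fast path is a single compare of esp
// against the current stack limit; the unlikely overflow case branches to
// deferred, out-of-line code that calls the stack-check stub.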
void CodeGenerator::CheckStack() {
  DeferredStackCheck* deferred = new DeferredStackCheck;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit();
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  deferred->Branch(below);
  deferred->BindExit();
}


void CodeGenerator::VisitAndSpill(Statement* statement) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Visit(statement);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  VisitStatements(statements);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);

  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());
  for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
    Visit(statements->at(i));
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitBlock(Block* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  VisitStatements(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals. The inevitable call
  // will sync frame elements to memory anyway, so we do it eagerly to
  // allow us to push the arguments directly into place.
  frame_->SyncRange(0, frame_->element_count() - 1);

  frame_->EmitPush(esi);  // The context is the first argument.
  frame_->EmitPush(Immediate(pairs));
  frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
  Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->AsSlot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call. Sync the virtual frame eagerly
    // so we can simply push the arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    // Declaration nodes are always introduced in one of two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Immediate(Smi::FromInt(attr)));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPush(Immediate(Factory::the_hole_value()));
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      frame_->EmitPush(Immediate(Smi::FromInt(0)));  // no initial value!
    }
    Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    {
      // Set the initial value.
      Reference target(this, node->proxy());
      Load(val);
      target.SetValue(NOT_CONST_INIT);
      // The reference is removed from the stack (preserving TOS) when
      // it goes out of scope.
    }
    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  // Remove the lingering expression result from the top of stack.
  frame_->Drop();
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);
  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    JumpTarget then;
    JumpTarget else_;
    ControlDestination dest(&then, &else_, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The else target was bound, so we compile the else part first.
      Visit(node->else_statement());

      // We may have dangling jumps to the then part.
      if (then.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then target was bound, so we compile the then part first.
      Visit(node->then_statement());

      if (else_.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    }

  } else if (has_then_stm) {
    ASSERT(!has_else_stm);
    JumpTarget then;
    ControlDestination dest(&then, &exit, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The exit label was bound. We may have dangling jumps to the
      // then part.
      if (then.is_linked()) {
        exit.Unuse();
        exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then label was bound.
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    ASSERT(!has_then_stm);
    JumpTarget else_;
    ControlDestination dest(&exit, &else_, false);
    LoadCondition(node->condition(), &dest, true);

    if (dest.true_was_fall_through()) {
      // The exit label was bound. We may have dangling jumps to the
      // else part.
      if (else_.is_linked()) {
        exit.Unuse();
        exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    } else {
      // The else label was bound.
      Visit(node->else_statement());
    }

  } else {
    ASSERT(!has_then_stm && !has_else_stm);
    // We only care about the condition's side effects (not its value
    // or control flow effect). LoadCondition is called without
    // forcing control flow.
    ControlDestination dest(&exit, &exit, true);
    LoadCondition(node->condition(), &dest, false);
    if (!dest.is_used()) {
      // We got a value on the frame rather than (or in addition to)
      // control flow.
      frame_->Drop();
    }
  }

  if (exit.is_linked()) {
    exit.Bind();
  }
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ReturnStatement");

  CodeForStatementPosition(node);
  Load(node->expression());
  Result return_value = frame_->Pop();
  masm()->positions_recorder()->WriteRecordedPositions();
  if (function_return_is_shadowed_) {
    function_return_.Jump(&return_value);
  } else {
    frame_->PrepareForReturn();
    if (function_return_.is_bound()) {
      // If the function return label is already bound we reuse the
      // code by jumping to the return site.
      function_return_.Jump(&return_value);
    } else {
      function_return_.Bind(&return_value);
      GenerateReturnSequence(&return_value);
    }
  }
}


void CodeGenerator::GenerateReturnSequence(Result* return_value) {
  // The return value is a live (but not currently reference counted)
  // reference to eax. This is safe because the current frame does not
  // contain a reference to eax (it is prepared for the return by spilling
  // all registers).
  if (FLAG_trace) {
    frame_->Push(return_value);
    *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
  }
  return_value->ToRegister(eax);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);
#endif

  // Leave the frame and return popping the arguments and the
  // receiver.
  frame_->Exit();
  masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
  DeleteFrame();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check that the size of the code used for returning matches what is
  // expected by the debugger.
  ASSERT_EQ(Assembler::kJSReturnSequenceLength,
            masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  Result context;
  if (node->is_catch_block()) {
    context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    context = frame_->CallRuntime(Runtime::kPushContext, 1);
  }

  // Update context local.
  frame_->SaveContextRegister();

  // Verify that the runtime call result and esi agree.
  if (FLAG_debug_code) {
    __ cmp(context.reg(), Operand(esi));
    __ Assert(equal, "Runtime::NewContext should end up in esi");
  }
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
  // Update context local.
  frame_->SaveContextRegister();
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);

  // Compile the switch value.
  Load(node->tag());
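  // The switch value stays on the frame across the whole cascade of tests;
  // each comparison below consumes a duplicate, so a failed test still has
  // the value available for the next one.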

  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;

  JumpTarget next_test;
  // Compile the case label expressions and comparisons. Exit early
  // if a comparison is unconditionally true. The target next_test is
  // bound before the loop in order to indicate control flow to the
  // first comparison.
  next_test.Bind();
  for (int i = 0; i < length && !next_test.is_unused(); i++) {
    CaseClause* clause = cases->at(i);
    // The default is not a test, but remember it for later.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    // We recycle the same target next_test for each test. Bind it if
    // the previous test has not done so and then unuse it for the
    // loop.
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    next_test.Unuse();

    // Duplicate the switch value.
    frame_->Dup();

    // Compile the label expression.
    Load(clause->label());

    // Compare and branch to the body if true or the next test if
    // false. Prefer the next test as a fall through.
    ControlDestination dest(clause->body_target(), &next_test, false);
    Comparison(node, equal, true, &dest);

    // If the comparison fell through to the true target, jump to the
    // actual body.
    if (dest.true_was_fall_through()) {
      clause->body_target()->Unuse();
      clause->body_target()->Jump();
    }
  }

  // If there was control flow to a next test from the last one
  // compiled, compile a jump to the default or break target.
  if (!next_test.is_unused()) {
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    // Drop the switch value.
    frame_->Drop();
    if (default_clause != NULL) {
      default_clause->body_target()->Jump();
    } else {
      node->break_target()->Jump();
    }
  }

  // The last instruction emitted was a jump, either to the default
  // clause or the break target, or else to a case body from the loop
  // that compiles the tests.
  ASSERT(!has_valid_frame());
  // Compile case bodies as needed.
  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);

    // There are two ways to reach the body: from the corresponding
    // test or as the fall through of the previous body.
    if (clause->body_target()->is_linked() || has_valid_frame()) {
      if (clause->body_target()->is_linked()) {
        if (has_valid_frame()) {
          // If we have both a jump to the test and a fall through, put
          // a jump on the fall through path to avoid the dropping of
          // the switch value on the test path. The exception is the
          // default which has already had the switch value dropped.
          if (clause->is_default()) {
            clause->body_target()->Bind();
          } else {
            JumpTarget body;
            body.Jump();
            clause->body_target()->Bind();
            frame_->Drop();
            body.Bind();
          }
        } else {
          // No fall through to worry about.
          clause->body_target()->Bind();
          if (!clause->is_default()) {
            frame_->Drop();
          }
        }
      } else {
        // Otherwise, we have only fall through.
        ASSERT(has_valid_frame());
      }

      // We are now prepared to compile the body.
      Comment cmnt(masm_, "[ Case body");
      VisitStatements(clause->statements());
    }
    clause->body_target()->Unuse();
  }

  // We may not have a valid frame here so bind the break target only
  // if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

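  // AnalyzeCondition classifies the condition as ALWAYS_TRUE, ALWAYS_FALSE,
  // or DONT_KNOW (boolean literals yield the first two), which determines
  // how much of the loop scaffolding has to be emitted at all.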
  ConditionAnalysis info = AnalyzeCondition(node->cond());
3953 // Label the top of the loop for the backward jump if necessary.
3954 switch (info) {
3955 case ALWAYS_TRUE:
3956 // Use the continue target.
3957 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3958 node->continue_target()->Bind();
3959 break;
3960 case ALWAYS_FALSE:
3961 // No need to label it.
3962 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3963 break;
3964 case DONT_KNOW:
3965 // Continue is the test, so use the backward body target.
3966 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3967 body.Bind();
3968 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00003969 }
3970
Steve Block3ce2e202009-11-05 08:53:23 +00003971 CheckStack(); // TODO(1222600): ignore if body contains calls.
3972 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00003973
Steve Block3ce2e202009-11-05 08:53:23 +00003974 // Compile the test.
3975 switch (info) {
3976 case ALWAYS_TRUE:
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003977 // If control flow can fall off the end of the body, jump back
3978 // to the top and bind the break target at the exit.
Steve Block3ce2e202009-11-05 08:53:23 +00003979 if (has_valid_frame()) {
3980 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00003981 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003982 if (node->break_target()->is_linked()) {
3983 node->break_target()->Bind();
3984 }
3985 break;
Steve Block3ce2e202009-11-05 08:53:23 +00003986 case ALWAYS_FALSE:
3987 // We may have had continues or breaks in the body.
3988 if (node->continue_target()->is_linked()) {
3989 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003990 }
Steve Block3ce2e202009-11-05 08:53:23 +00003991 if (node->break_target()->is_linked()) {
3992 node->break_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003993 }
Steve Block3ce2e202009-11-05 08:53:23 +00003994 break;
3995 case DONT_KNOW:
3996 // We have to compile the test expression if it can be reached by
3997 // control flow falling out of the body or via continue.
3998 if (node->continue_target()->is_linked()) {
3999 node->continue_target()->Bind();
4000 }
4001 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00004002 Comment cmnt(masm_, "[ DoWhileCondition");
4003 CodeForDoWhileConditionPosition(node);
Steve Block3ce2e202009-11-05 08:53:23 +00004004 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00004005 LoadCondition(node->cond(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00004006 }
Steve Block3ce2e202009-11-05 08:53:23 +00004007 if (node->break_target()->is_linked()) {
4008 node->break_target()->Bind();
4009 }
4010 break;
4011 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004012
Steve Block3ce2e202009-11-05 08:53:23 +00004013 DecrementLoopNesting();
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004014 node->continue_target()->Unuse();
4015 node->break_target()->Unuse();
Steve Block3ce2e202009-11-05 08:53:23 +00004016}
Steve Blocka7e24c12009-10-30 11:49:00 +00004017
Steve Block3ce2e202009-11-05 08:53:23 +00004018
4019void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
4020 ASSERT(!in_spilled_code());
4021 Comment cmnt(masm_, "[ WhileStatement");
4022 CodeForStatementPosition(node);
4023
4024 // If the condition is always false and has no side effects, we do not
4025 // need to compile anything.
4026 ConditionAnalysis info = AnalyzeCondition(node->cond());
4027 if (info == ALWAYS_FALSE) return;
4028
4029 // Do not duplicate conditions that may have function literal
4030 // subexpressions. This can cause us to compile the function literal
4031 // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop with the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is the test at the bottom, no need to label the test
        // at the top. The body is a backward target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else {
        // Label the test at the top as the continue target. The body
        // is a forward-only target.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // The loop body has been labeled with the continue target.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        // If we have chosen to recompile the test at the bottom,
        // then it is the continue target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here and thus an invalid fall-through).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // If we have chosen not to recompile the test at the bottom,
        // jump back to the one at the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
  ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
  if (slot->type() == Slot::LOCAL) {
    frame_->SetTypeForLocalAt(slot->index(), info);
  } else {
    frame_->SetTypeForParamAt(slot->index(), info);
  }
  if (FLAG_debug_code && info.IsSmi()) {
    if (slot->type() == Slot::LOCAL) {
      frame_->PushLocalAt(slot->index());
    } else {
      frame_->PushParameterAt(slot->index());
    }
    Result var = frame_->Pop();
    var.ToRegister();
    __ AbortIfNotSmi(var.reg());
  }
}


void CodeGenerator::VisitForStatement(ForStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);

  // Compile the init expression if present.
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the condition is always false and has no side effects, we do not
  // need to compile anything else.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions. This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();

  // Target for backward edge if no test at the bottom, otherwise
  // unused.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);

  // Target for backward edge if there is a test at the bottom,
  // otherwise used as target for test at the top.
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop.
      if (node->next() == NULL) {
        // Use the continue target if there is no update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // Otherwise use the backward loop target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is either the update expression or the test at the
        // bottom, no need to label the test at the top.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else if (node->next() == NULL) {
        // We are not recompiling the test at the bottom and there is no
        // update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // We are not recompiling the test at the bottom and there is an
        // update expression.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }

      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition. In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
  if (node->is_fast_smi_loop()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
  }
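  // For example (illustrative): in 'for (var i = 0; i < 100; i++) { g(); }'
  // where the body never assigns to i, the index is checked against a
  // constant limit and stays a smi for the whole loop, so the body can be
  // compiled with i statically typed as smi.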

  Visit(node->body());

  // If there is an update expression, compile it if necessary.
  if (node->next() != NULL) {
    if (node->continue_target()->is_linked()) {
      node->continue_target()->Bind();
    }

    // Control can reach the update by falling out of the body or by a
    // continue.
    if (has_valid_frame()) {
      // Record the source position of the statement, because this code,
      // which comes after the code for the body, actually belongs to the
      // loop statement and not the body.
      CodeForStatementPosition(node);
      Visit(node->next());
    }
  }

  // Set the type of the loop variable to smi before compiling the test
  // expression if we are in a fast smi loop condition.
  if (node->is_fast_smi_loop() && has_valid_frame()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
  }

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      if (has_valid_frame()) {
        if (node->next() == NULL) {
          node->continue_target()->Jump();
        } else {
          loop.Jump();
        }
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        if (node->continue_target()->is_linked()) {
          // We can have dangling jumps to the continue target if there
          // was no update expression.
          node->continue_target()->Bind();
        }
        // Control can reach the test at the bottom by falling out of
        // the body, by a continue in the body, or from the update
        // expression.
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // Otherwise, jump back to the test at the top.
        if (has_valid_frame()) {
          if (node->next() == NULL) {
            node->continue_target()->Jump();
          } else {
            loop.Jump();
          }
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive;
  JumpTarget jsobject;
  JumpTarget fixed_array;
  JumpTarget entry(JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check;
  JumpTarget exit;

  // Get the object to enumerate over (converted to JSObject).
  LoadAndSpill(node->enumerable());

  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification. 12.6.4 mandates a call to ToObject.
  frame_->EmitPop(eax);

  // eax: value to be iterated over
  __ cmp(eax, Factory::undefined_value());
  exit.Branch(equal);
  __ cmp(eax, Factory::null_value());
  exit.Branch(equal);

  // Stack layout in body:
  // [iteration counter (smi)] <- slot 0
  // [length of array]         <- slot 1
  // [FixedArray]              <- slot 2
  // [Map or 0]                <- slot 3
  // [Object]                  <- slot 4

  // Check if the enumerable is already a JSObject.
  // eax: value to be iterated over
  __ test(eax, Immediate(kSmiTagMask));
  primitive.Branch(zero);
  __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
  jsobject.Branch(above_equal);

  primitive.Bind();
  frame_->EmitPush(eax);
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
  // The builtin call returns the value in eax, which is where we want
  // it below.

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // eax: value to be iterated over
  frame_->EmitPush(eax);  // Push the object being iterated over.

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
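  // Illustrative note: for a plain object such as 'var o = {a: 1, b: 2}',
  // with no elements, non-empty instance descriptors holding an enum
  // cache, and nothing but null (or equally simple objects) on its
  // prototype chain, the checks below succeed and iteration can use the
  // map's enum cache directly instead of calling the runtime.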
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(ecx, eax);
  loop.Bind();
  // Check that there are no elements.
  __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache. Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
  call_runtime.Branch(equal);
  // Check that there is an enum cache in the non-empty instance
  // descriptors. This is the case if the next enumeration index
  // field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  call_runtime.Branch(zero);
  // For all objects but the receiver, check that the cache is empty.
  __ cmp(ecx, Operand(eax));
  check_prototype.Branch(equal);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  loop.Branch(not_equal);
  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(eax);  // Push the object (slot 4) for the runtime call.
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // eax: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(edx, Operand(eax));
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, Factory::meta_map());
  fixed_array.Branch(not_equal);

  use_cache.Bind();
  // Get the enum cache.
  // eax: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(ecx, Operand(eax));

  __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
  // Get the bridge array held in the enumeration index field.
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  // Get the cache from the bridge array.
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(eax);  // <- slot 3
  frame_->EmitPush(edx);  // <- slot 2
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
  entry.Jump();

  fixed_array.Bind();
  // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 3
  frame_->EmitPush(eax);  // <- slot 2

  // Push the length of the array and the initial index onto the stack.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0

  // Condition.
  entry.Bind();
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);

  __ mov(eax, frame_->ElementAt(0));  // Load the current count.
  __ cmp(eax, frame_->ElementAt(1));  // Compare to the array length.
  node->break_target()->Branch(above_equal);

  // Get the i'th entry of the array.
  __ mov(edx, frame_->ElementAt(2));
  __ mov(ebx, FixedArrayElementOperand(edx, eax));

  // Get the expected map from the stack, or a zero map in the
  // permanent slow case.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  __ mov(edx, frame_->ElementAt(3));
  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  // edx: expected map value
  __ mov(ecx, frame_->ElementAt(4));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, Operand(edx));
  end_del_check.Branch(equal);

  // Convert the entry to a string (or null if it isn't a property anymore).
  frame_->EmitPush(frame_->ElementAt(4));  // Push the enumerable.
  frame_->EmitPush(ebx);  // Push the entry.
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
  __ mov(ebx, Operand(eax));

  // If the property has been removed while iterating, we just skip it.
  __ test(ebx, Operand(ebx));
  node->continue_target()->Branch(equal);
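  // For example (illustrative), in
  //   for (var key in o) { delete o.other; }
  // the map check above fails after the delete, FILTER_KEY re-validates
  // each remaining key, and keys that no longer exist are skipped here.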

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop. ebx: i'th entry of the enum cache (or the string thereof).
  frame_->EmitPush(ebx);
  { Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        // Loading a reference may leave the frame in an unspilled state.
        frame_->SpillAll();
        // Get the value (under the reference on the stack) from memory.
        frame_->EmitPush(frame_->ElementAt(each.size()));
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop(2);
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (e.g., ebx pushed above) is
        // right on top of or right underneath a zero-sized reference.
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop();
      }
    }
  }
  // Unloading a reference may leave the frame in an unspilled state.
  frame_->SpillAll();

  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  VisitAndSpill(node->body());

  // Next. Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  frame_->EmitPop(eax);
  __ add(Operand(eax), Immediate(Smi::FromInt(1)));
  frame_->EmitPush(eax);
  entry.Jump();

  // Cleanup. No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();

  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}


void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(eax);

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
  StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  VisitStatementsAndSpill(node->catch_block()->statements());
  if (has_valid_frame()) {
    exit.Jump();
  }


  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
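  // For example (illustrative), given
  //   while (x) { try { if (c) break; f(); } catch (e) { } }
  // the 'break' escapes the try block, so its target is shadowed here and
  // the jump is routed through code that first unlinks the try handler.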
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // Make sure that there's nothing left on the stack above the
  // handler structure.
  if (FLAG_debug_code) {
    __ mov(eax, Operand::StaticVariable(handler_address));
    __ cmp(esp, Operand(eax));
    __ Assert(equal, "stack pointer should point to top handler");
  }

  // If we can fall off the end of the try block, unlink from the try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame. Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing targets that
  // have been jumped to. Deallocate each shadow target.
  Result return_value;
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from the try chain; be careful not to destroy the TOS if
      // there is one.
      if (i == kReturnShadowIndex) {
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (e.g., for...in) may have left stuff on the stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        if (!function_return_is_shadowed_) frame_->PrepareForReturn();
        shadows[i]->other_target()->Jump(&return_value);
      } else {
        shadows[i]->other_target()->Jump();
      }
    }
  }

  exit.Bind();
}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of the reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
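  // Meaning of the states (as used below): FALLING marks normal fall-off
  // from the end of the try block, THROWING marks entry via a thrown
  // exception, and JUMPING + i marks an escape through shadow target i
  // (a break, continue, or return out of the try block).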

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(eax);
  // In case of thrown exceptions, this is where we continue.
  __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in ecx, then jump around the unlink blocks if any.
    frame_->EmitPush(Immediate(Factory::undefined_value()));
    __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // on the virtual frame. We must preserve it until it is
      // pushed.
      if (i == kReturnShadowIndex) {
        Result return_value;
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (e.g., for...in) may have left stuff on the
      // stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.
      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this target shadowed the function return, materialize
        // the return value on the stack.
        frame_->EmitPush(eax);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        frame_->EmitPush(Immediate(Factory::undefined_value()));
      }
      __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(ecx);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatementsAndSpill(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(ecx);
    frame_->EmitPop(eax);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets. Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      BreakTarget* original = shadows[i]->other_target();
      __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
      if (i == kReturnShadowIndex) {
        // The return value is (already) in eax.
        Result return_value = allocator_->Allocate(eax);
        ASSERT(return_value.is_valid());
        if (function_return_is_shadowed_) {
          original->Branch(equal, &return_value);
        } else {
          // Branch around the preparation for return which may emit
          // code.
          JumpTarget skip;
          skip.Branch(not_equal);
          frame_->PrepareForReturn();
          original->Jump(&return_value);
          skip.Bind();
        }
      } else {
        original->Branch(equal);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
    exit.Branch(not_equal);

    // Rethrow the exception.
    frame_->EmitPush(eax);  // Undo the pop from above.
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Spill everything, even constants, to the frame.
  frame_->SpillAll();

  frame_->DebugBreak();
  // Ignore the return value.
#endif
}


Result CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info,
    bool pretenure) {
  // The inevitable call will sync frame elements to memory anyway, so
  // we do it eagerly to allow us to push the arguments directly into
  // place.
  frame()->SyncRange(0, frame()->element_count() - 1);

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (scope()->is_function_scope() &&
      function_info->num_literals() == 0 &&
      !pretenure) {
    FastNewClosureStub stub;
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallStub(&stub, 1);
  } else {
    // Call the runtime to instantiate the function based on the
    // shared function info.
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(function_info));
    frame()->EmitPush(Immediate(pretenure
                                ? Factory::true_value()
                                : Factory::false_value()));
    return frame()->CallRuntime(Runtime::kNewClosure, 3);
  }
}
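// Illustrative note: a nested function whose body contains no literals,
// e.g.
//   function outer() { return function (x) { return x + 1; }; }
// can take the FastNewClosureStub path above, while a function containing
// literals (or one that must be pretenured) falls back to
// Runtime::kNewClosure.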


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  Comment cmnt(masm_, "[ FunctionLiteral");
  ASSERT(!in_safe_int32_mode());
  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script());
  // Check for stack-overflow exception.
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  Result result = InstantiateFunction(function_info, node->pretenure());
  frame()->Push(&result);
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  Result result = InstantiateFunction(node->shared_function_info(), false);
  frame()->Push(&result);
}


void CodeGenerator::VisitConditional(Conditional* node) {
  Comment cmnt(masm_, "[ Conditional");
  ASSERT(!in_safe_int32_mode());
  JumpTarget then;
  JumpTarget else_;
  JumpTarget exit;
  ControlDestination dest(&then, &else_, true);
  LoadCondition(node->condition(), &dest, true);

  if (dest.false_was_fall_through()) {
    // The else target was bound, so we compile the else part first.
    Load(node->else_expression());

    if (then.is_linked()) {
      exit.Jump();
      then.Bind();
      Load(node->then_expression());
    }
  } else {
    // The then target was bound, so we compile the then part first.
    Load(node->then_expression());

    if (else_.is_linked()) {
      exit.Jump();
      else_.Bind();
      Load(node->else_expression());
    }
  }

  exit.Bind();
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());
    JumpTarget slow;
    JumpTarget done;
    Result value;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &value,
                                    &slow,
                                    &done);

    slow.Bind();
    // A runtime call is inevitable. We eagerly sync frame elements
    // to memory so that we can push the arguments directly into place
    // on top of the frame.
    frame()->SyncRange(0, frame()->element_count() - 1);
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(slot->var()->name()));
    if (typeof_state == INSIDE_TYPEOF) {
      value =
          frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      value = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind(&value);
    frame_->Push(&value);

  } else if (slot->var()->mode() == Variable::CONST) {
    // Const slots may contain 'the hole' value (the constant hasn't been
    // initialized yet) which needs to be converted into the 'undefined'
    // value.
    //
    // We currently spill the virtual frame because constants use the
    // potentially unsafe direct-frame access of SlotOperand.
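    // For example (illustrative), in
    //   (function() { var y = c; const c = 1; })();
    // the read of 'c' happens before its initialization, sees the hole,
    // and must yield 'undefined' under these (pre-ES5) const semantics.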
    VirtualFrame::SpilledScope spilled_scope;
    Comment cmnt(masm_, "[ Load const");
    Label exit;
    __ mov(ecx, SlotOperand(slot, ecx));
    __ cmp(ecx, Factory::the_hole_value());
    __ j(not_equal, &exit);
    __ mov(ecx, Factory::undefined_value());
    __ bind(&exit);
    frame()->EmitPush(ecx);

  } else if (slot->type() == Slot::PARAMETER) {
    frame()->PushParameterAt(slot->index());

  } else if (slot->type() == Slot::LOCAL) {
    frame()->PushLocalAt(slot->index());

  } else {
    // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
    // here.
    //
    // The use of SlotOperand below is safe for an unspilled frame
    // because it will always be a context slot.
    ASSERT(slot->type() == Slot::CONTEXT);
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
    frame()->Push(&temp);
  }
}


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // If the loaded value is a constant, we know whether the arguments
  // object has been lazily loaded yet.
  Result result = frame()->Pop();
  if (result.is_constant()) {
    if (result.handle()->IsTheHole()) {
      result = StoreArgumentsObject(false);
    }
    frame()->Push(&result);
    return;
  }
  ASSERT(result.is_register());
  // The loaded value is in a register. If it is the sentinel that
  // indicates that we haven't loaded the arguments object yet, we
  // need to do it now.
  JumpTarget exit;
  __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
  frame()->Push(&result);
  exit.Branch(not_equal);

  result = StoreArgumentsObject(false);
  frame()->SetElementAt(0, &result);
  result.Unuse();
  exit.Bind();
  return;
}


Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    JumpTarget* slow) {
  ASSERT(!in_safe_int32_mode());
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
  Register context = esi;
  Result tmp = allocator_->Allocate();
  ASSERT(tmp.is_valid());  // All non-reserved registers were available.

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      // Load the next context in the chain.
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Walk up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(tmp.reg())) {
      __ mov(tmp.reg(), context);
    }
    __ bind(&next);
    // Terminate at the global context.
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
    slow->Branch(not_equal, not_taken);
    // Load the next context in the chain.
    __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
    __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }
  tmp.Unuse();

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  // The register allocator prefers eax if it is free, so the code generator
  // will load the global object directly into eax, which is where the LoadIC
  // expects it.
  frame_->Spill(eax);
  LoadGlobal();
  frame_->Push(slot->var()->name());
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
                         ? RelocInfo::CODE_TARGET
                         : RelocInfo::CODE_TARGET_CONTEXT;
  Result answer = frame_->CallLoadIC(mode);
  // A test eax instruction following the call signals that the inobject
  // property case was inlined. Ensure that there is not a test eax
  // instruction here.
  __ nop();
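  // Background (illustrative): the IC machinery inspects the instruction
  // that follows a load IC call site; a 'test eax' there is used as a
  // marker for an inlined in-object property load that can be patched.
  // The nop above guarantees this call site is never mistaken for one.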
  return answer;
}


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    Result* result,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
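  // For example (illustrative), in
  //   function f(x) { eval("1"); return x; }
  // the eval introduces no bindings, so the load of 'x' can still take
  // the fast path below, guarded by the context extension checks.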
5179 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
5180 *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
5181 done->Jump(result);
5182
5183 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005184 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Kristian Monsen25f61362010-05-21 11:50:48 +01005185 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
5186 if (potential_slot != NULL) {
5187 // Generate fast case for locals that rewrite to slots.
5188 // Allocate a fresh register to use as a temp in
5189 // ContextSlotOperandCheckExtensions and to hold the result
5190 // value.
5191 *result = allocator()->Allocate();
5192 ASSERT(result->is_valid());
5193 __ mov(result->reg(),
5194 ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
5195 if (potential_slot->var()->mode() == Variable::CONST) {
5196 __ cmp(result->reg(), Factory::the_hole_value());
5197 done->Branch(not_equal, result);
5198 __ mov(result->reg(), Factory::undefined_value());
5199 }
5200 done->Jump(result);
5201 } else if (rewrite != NULL) {
5202 // Generate fast case for calls of an argument function.
5203 Property* property = rewrite->AsProperty();
5204 if (property != NULL) {
5205 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
5206 Literal* key_literal = property->key()->AsLiteral();
5207 if (obj_proxy != NULL &&
5208 key_literal != NULL &&
5209 obj_proxy->IsArguments() &&
5210 key_literal->handle()->IsSmi()) {
5211 // Load arguments object if there are no eval-introduced
5212 // variables. Then load the argument from the arguments
5213 // object using keyed load.
5214 Result arguments = allocator()->Allocate();
5215 ASSERT(arguments.is_valid());
5216 __ mov(arguments.reg(),
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005217 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01005218 arguments,
5219 slow));
5220 frame_->Push(&arguments);
5221 frame_->Push(key_literal->handle());
5222 *result = EmitKeyedLoad();
5223 done->Jump(result);
5224 }
5225 }
5226 }
5227 }
5228}
5229
5230
Steve Blocka7e24c12009-10-30 11:49:00 +00005231void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
5232 if (slot->type() == Slot::LOOKUP) {
5233 ASSERT(slot->var()->is_dynamic());
5234
5235 // For now, just do a runtime call. Since the call is inevitable,
5236 // we eagerly sync the virtual frame so we can directly push the
5237 // arguments into place.
5238 frame_->SyncRange(0, frame_->element_count() - 1);
5239
5240 frame_->EmitPush(esi);
5241 frame_->EmitPush(Immediate(slot->var()->name()));
5242
5243 Result value;
5244 if (init_state == CONST_INIT) {
5245 // Same as the case for a normal store, but ignores attribute
5246 // (e.g. READ_ONLY) of context slot so that we can initialize const
5247 // properties (introduced via eval("const foo = (some expr);")). Also,
5248 // uses the current function context instead of the top context.
5249 //
5250 // Note that we must declare the foo upon entry of eval(), via a
5251 // context slot declaration, but we cannot initialize it at the same
5252 // time, because the const declaration may be at the end of the eval
5253 // code (sigh...) and the const variable may have been used before
5254 // (where its value is 'undefined'). Thus, we can only do the
5255 // initialization when we actually encounter the expression and when
5256 // the expression operands are defined and valid, and thus we need the
5257 // split into 2 operations: declaration of the context slot followed
5258 // by initialization.
5259 value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
5260 } else {
5261 value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
5262 }
5263 // Storing a variable must keep the (new) value on the expression
5264 // stack. This is necessary for compiling chained assignment
5265 // expressions.
5266 frame_->Push(&value);
5267
5268 } else {
5269 ASSERT(!slot->var()->is_dynamic());
5270
5271 JumpTarget exit;
5272 if (init_state == CONST_INIT) {
5273 ASSERT(slot->var()->mode() == Variable::CONST);
5274 // Only the first const initialization must be executed (the slot
5275 // still contains 'the hole' value). When the assignment is executed,
5276 // the code is identical to a normal store (see below).
5277 //
5278 // We spill the frame in the code below because the direct-frame
5279 // access of SlotOperand is potentially unsafe with an unspilled
5280 // frame.
5281 VirtualFrame::SpilledScope spilled_scope;
5282 Comment cmnt(masm_, "[ Init const");
5283 __ mov(ecx, SlotOperand(slot, ecx));
5284 __ cmp(ecx, Factory::the_hole_value());
5285 exit.Branch(not_equal);
5286 }
5287
5288 // We must execute the store. Storing a variable must keep the (new)
5289 // value on the stack. This is necessary for compiling assignment
5290 // expressions.
5291 //
5292 // Note: We will reach here even with slot->var()->mode() ==
5293 // Variable::CONST because of const declarations which will initialize
5294 // consts to 'the hole' value and by doing so, end up calling this code.
5295 if (slot->type() == Slot::PARAMETER) {
5296 frame_->StoreToParameterAt(slot->index());
5297 } else if (slot->type() == Slot::LOCAL) {
5298 frame_->StoreToLocalAt(slot->index());
5299 } else {
5300 // The other slot types (LOOKUP and GLOBAL) cannot reach here.
5301 //
5302 // The use of SlotOperand below is safe for an unspilled frame
5303 // because the slot is a context slot.
5304 ASSERT(slot->type() == Slot::CONTEXT);
5305 frame_->Dup();
5306 Result value = frame_->Pop();
5307 value.ToRegister();
5308 Result start = allocator_->Allocate();
5309 ASSERT(start.is_valid());
5310 __ mov(SlotOperand(slot, start.reg()), value.reg());
5311 // RecordWrite may destroy the value registers.
5312 //
5313 // TODO(204): Avoid actually spilling when the value is not
5314 // needed (probably the common case).
5315 frame_->Spill(value.reg());
5316 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
5317 Result temp = allocator_->Allocate();
5318 ASSERT(temp.is_valid());
5319 __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
5320 // The results start, value, and temp are unused by going out of
5321 // scope.
5322 }
5323
5324 exit.Bind();
5325 }
5326}
5327
5328
Steve Block6ded16b2010-05-10 14:33:55 +01005329void CodeGenerator::VisitSlot(Slot* slot) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005330 Comment cmnt(masm_, "[ Slot");
Steve Block6ded16b2010-05-10 14:33:55 +01005331 if (in_safe_int32_mode()) {
5332 if ((slot->type() == Slot::LOCAL && !slot->is_arguments())) {
5333 frame()->UntaggedPushLocalAt(slot->index());
5334 } else if (slot->type() == Slot::PARAMETER) {
5335 frame()->UntaggedPushParameterAt(slot->index());
5336 } else {
5337 UNREACHABLE();
5338 }
5339 } else {
Leon Clarkef7060e22010-06-03 12:02:55 +01005340 LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Block6ded16b2010-05-10 14:33:55 +01005341 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005342}
5343
5344
5345void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
5346 Comment cmnt(masm_, "[ VariableProxy");
5347 Variable* var = node->var();
5348 Expression* expr = var->rewrite();
5349 if (expr != NULL) {
5350 Visit(expr);
5351 } else {
5352 ASSERT(var->is_global());
Steve Block6ded16b2010-05-10 14:33:55 +01005353 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005354 Reference ref(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00005355 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005356 }
5357}
5358
5359
5360void CodeGenerator::VisitLiteral(Literal* node) {
5361 Comment cmnt(masm_, "[ Literal");
Steve Block6ded16b2010-05-10 14:33:55 +01005362 if (in_safe_int32_mode()) {
5363 frame_->PushUntaggedElement(node->handle());
5364 } else {
5365 frame_->Push(node->handle());
5366 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005367}
5368
5369
Steve Blockd0582a62009-12-15 09:54:21 +00005370void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
5371 ASSERT(value->IsSmi());
5372 int bits = reinterpret_cast<int>(*value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005373 __ push(Immediate(bits ^ jit_cookie_));
5374 __ xor_(Operand(esp, 0), Immediate(jit_cookie_));
Steve Blockd0582a62009-12-15 09:54:21 +00005375}
5376
5377
5378void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
5379 ASSERT(value->IsSmi());
5380 int bits = reinterpret_cast<int>(*value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005381 __ mov(Operand(ebp, offset), Immediate(bits ^ jit_cookie_));
5382 __ xor_(Operand(ebp, offset), Immediate(jit_cookie_));
Steve Blockd0582a62009-12-15 09:54:21 +00005383}
5384
5385
5386void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005387 ASSERT(target.is_valid());
5388 ASSERT(value->IsSmi());
5389 int bits = reinterpret_cast<int>(*value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005390 __ Set(target, Immediate(bits ^ jit_cookie_));
5391 __ xor_(target, jit_cookie_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005392}
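
// A minimal sketch of the xor identity the three helpers above rely on,
// with an assumed cookie value (the real jit_cookie_ is chosen at run
// time):
//
//   int bits = 0x0000000A;                 // raw bit pattern of Smi::FromInt(5)
//   int cookie = 0x2E7B1A5F;               // hypothetical jit_cookie_
//   int embedded = bits ^ cookie;          // the only immediate emitted
//   ASSERT((embedded ^ cookie) == bits);   // undone by the emitted xor
//
// Because xor is its own inverse, the smi's bit pattern never appears
// verbatim in the generated instruction stream, keeping attacker-chosen
// constants out of executable memory.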
5393
5394
5395bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
5396 if (!value->IsSmi()) return false;
5397 int int_value = Smi::cast(*value)->value();
5398 return !is_intn(int_value, kMaxSmiInlinedBits);
5399}
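
// For illustration: a smi whose payload fits in kMaxSmiInlinedBits (for
// example Smi::FromInt(0)) is considered safe and emitted directly, while
// payloads outside that range are "unsafe" and take the cookie-masked
// path above. The threshold is a platform-specific constant, so no
// particular bit width is assumed here.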
5400
5401
5402// Materialize the regexp literal 'node' in the literals array
5403// 'literals' of the function. Leave the regexp boilerplate in
5404// 'boilerplate'.
5405class DeferredRegExpLiteral: public DeferredCode {
5406 public:
5407 DeferredRegExpLiteral(Register boilerplate,
5408 Register literals,
5409 RegExpLiteral* node)
5410 : boilerplate_(boilerplate), literals_(literals), node_(node) {
5411 set_comment("[ DeferredRegExpLiteral");
5412 }
5413
5414 void Generate();
5415
5416 private:
5417 Register boilerplate_;
5418 Register literals_;
5419 RegExpLiteral* node_;
5420};
5421
5422
5423void DeferredRegExpLiteral::Generate() {
5424 // Since the entry is undefined, we call the runtime system to
5425 // compute the literal.
5426 // Literal array (0).
5427 __ push(literals_);
5428 // Literal index (1).
5429 __ push(Immediate(Smi::FromInt(node_->literal_index())));
5430 // RegExp pattern (2).
5431 __ push(Immediate(node_->pattern()));
5432 // RegExp flags (3).
5433 __ push(Immediate(node_->flags()));
5434 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
5435 if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
5436}
5437
5438
Ben Murdochbb769b22010-08-11 14:56:33 +01005439class DeferredAllocateInNewSpace: public DeferredCode {
5440 public:
Steve Block791712a2010-08-27 10:21:07 +01005441 DeferredAllocateInNewSpace(int size,
5442 Register target,
5443 int registers_to_save = 0)
5444 : size_(size), target_(target), registers_to_save_(registers_to_save) {
Ben Murdochbb769b22010-08-11 14:56:33 +01005445 ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
Steve Block791712a2010-08-27 10:21:07 +01005446 ASSERT_EQ(0, registers_to_save & target.bit());
Ben Murdochbb769b22010-08-11 14:56:33 +01005447 set_comment("[ DeferredAllocateInNewSpace");
5448 }
5449 void Generate();
5450
5451 private:
5452 int size_;
5453 Register target_;
Steve Block791712a2010-08-27 10:21:07 +01005454 int registers_to_save_;
Ben Murdochbb769b22010-08-11 14:56:33 +01005455};
5456
5457
5458void DeferredAllocateInNewSpace::Generate() {
Steve Block791712a2010-08-27 10:21:07 +01005459 for (int i = 0; i < kNumRegs; i++) {
5460 if (registers_to_save_ & (1 << i)) {
5461 Register save_register = { i };
5462 __ push(save_register);
5463 }
5464 }
Ben Murdochbb769b22010-08-11 14:56:33 +01005465 __ push(Immediate(Smi::FromInt(size_)));
5466 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
5467 if (!target_.is(eax)) {
5468 __ mov(target_, eax);
5469 }
Steve Block791712a2010-08-27 10:21:07 +01005470 for (int i = kNumRegs - 1; i >= 0; i--) {
5471 if (registers_to_save_ & (1 << i)) {
5472 Register save_register = { i };
5473 __ pop(save_register);
5474 }
5475 }
Ben Murdochbb769b22010-08-11 14:56:33 +01005476}
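
// Hypothetical usage sketch for the deferred allocation above; eax and
// edx stand in for whatever registers happen to be live at the call site,
// and result_reg is an assumed name (the target must not be in the mask,
// per the ASSERT in the constructor):
//
//   DeferredAllocateInNewSpace* fallback =
//       new DeferredAllocateInNewSpace(size, result_reg,
//                                      eax.bit() | edx.bit());
//
// Generate() pushes the masked registers in ascending register-code order
// and pops them in descending order, so the saves and restores nest.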
5477
5478
Steve Blocka7e24c12009-10-30 11:49:00 +00005479void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005480 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005481 Comment cmnt(masm_, "[ RegExp Literal");
5482
5483 // Retrieve the literals array and check the allocated entry. Begin
5484 // with a writable copy of the function of this activation in a
5485 // register.
5486 frame_->PushFunction();
5487 Result literals = frame_->Pop();
5488 literals.ToRegister();
5489 frame_->Spill(literals.reg());
5490
5491 // Load the literals array of the function.
5492 __ mov(literals.reg(),
5493 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
5494
5495 // Load the literal at the ast saved index.
5496 Result boilerplate = allocator_->Allocate();
5497 ASSERT(boilerplate.is_valid());
5498 int literal_offset =
5499 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
5500 __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
5501
5502 // Check whether we need to materialize the RegExp object. If so,
5503 // jump to the deferred code passing the literals array.
5504 DeferredRegExpLiteral* deferred =
5505 new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
5506 __ cmp(boilerplate.reg(), Factory::undefined_value());
5507 deferred->Branch(equal);
5508 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00005509
Ben Murdochbb769b22010-08-11 14:56:33 +01005510 // The boilerplate register now contains the RegExp object.
5511
5512 Result tmp = allocator()->Allocate();
5513 ASSERT(tmp.is_valid());
5514
5515 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
5516
5517 DeferredAllocateInNewSpace* allocate_fallback =
5518 new DeferredAllocateInNewSpace(size, literals.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00005519 frame_->Push(&boilerplate);
Ben Murdochbb769b22010-08-11 14:56:33 +01005520 frame_->SpillTop();
5521 __ AllocateInNewSpace(size,
5522 literals.reg(),
5523 tmp.reg(),
5524 no_reg,
5525 allocate_fallback->entry_label(),
5526 TAG_OBJECT);
5527 allocate_fallback->BindExit();
5528 boilerplate = frame_->Pop();
5529 // Copy from boilerplate to clone and return clone.
5530
5531 for (int i = 0; i < size; i += kPointerSize) {
5532 __ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
5533 __ mov(FieldOperand(literals.reg(), i), tmp.reg());
5534 }
5535 frame_->Push(&literals);
Steve Blocka7e24c12009-10-30 11:49:00 +00005536}
5537
5538
Steve Blocka7e24c12009-10-30 11:49:00 +00005539void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005540 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005541 Comment cmnt(masm_, "[ ObjectLiteral");
5542
Leon Clarkee46be812010-01-19 14:06:41 +00005543 // Load a writable copy of the function of this activation into a
Steve Blocka7e24c12009-10-30 11:49:00 +00005544 // register.
5545 frame_->PushFunction();
5546 Result literals = frame_->Pop();
5547 literals.ToRegister();
5548 frame_->Spill(literals.reg());
5549
5550 // Load the literals array of the function.
5551 __ mov(literals.reg(),
5552 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00005553 // Literal array.
5554 frame_->Push(&literals);
5555 // Literal index.
5556 frame_->Push(Smi::FromInt(node->literal_index()));
5557 // Constant properties.
5558 frame_->Push(node->constant_properties());
Steve Block6ded16b2010-05-10 14:33:55 +01005559 // Should the object literal have fast elements?
5560 frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
Leon Clarkee46be812010-01-19 14:06:41 +00005561 Result clone;
5562 if (node->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01005563 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00005564 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01005565 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00005566 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005567 frame_->Push(&clone);
5568
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005569 // Mark all computed expressions that are bound to a key that
5570 // is shadowed by a later occurrence of the same key. For the
5571 // marked expressions, no store code is emitted.
5572 node->CalculateEmitStore();
5573
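 // Illustrative example of the shadowed-key rule, assuming an object
 // literal with a duplicate name:
 //
 //   var o = { x: f(), x: g() };
 //
 // Both f() and g() are still evaluated for their side effects, but the
 // first 'x' has emit_store() == false, so its value is dropped and only
 // the store for the last occurrence is emitted.
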
Steve Blocka7e24c12009-10-30 11:49:00 +00005574 for (int i = 0; i < node->properties()->length(); i++) {
5575 ObjectLiteral::Property* property = node->properties()->at(i);
5576 switch (property->kind()) {
5577 case ObjectLiteral::Property::CONSTANT:
5578 break;
5579 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5580 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
5581 // else fall through.
5582 case ObjectLiteral::Property::COMPUTED: {
5583 Handle<Object> key(property->key()->handle());
5584 if (key->IsSymbol()) {
5585 // Duplicate the object as the IC receiver.
5586 frame_->Dup();
5587 Load(property->value());
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005588 if (property->emit_store()) {
5589 Result ignored =
5590 frame_->CallStoreIC(Handle<String>::cast(key), false);
5591 // A test eax instruction following the store IC call would
5592 // indicate the presence of an inlined version of the
5593 // store. Add a nop to indicate that there is no such
5594 // inlined version.
5595 __ nop();
5596 } else {
5597 frame_->Drop(2);
5598 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005599 break;
5600 }
5601 // Fall through
5602 }
5603 case ObjectLiteral::Property::PROTOTYPE: {
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005604 // Duplicate the object as an argument to the runtime call.
5605 frame_->Dup();
5606 Load(property->key());
5607 Load(property->value());
5608 if (property->emit_store()) {
5609 // Ignore the result.
5610 Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
5611 } else {
5612 frame_->Drop(3);
5613 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005614 break;
5615 }
5616 case ObjectLiteral::Property::SETTER: {
5617 // Duplicate the object as an argument to the runtime call.
5618 frame_->Dup();
5619 Load(property->key());
5620 frame_->Push(Smi::FromInt(1));
5621 Load(property->value());
5622 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
5623 // Ignore the result.
5624 break;
5625 }
5626 case ObjectLiteral::Property::GETTER: {
5627 // Duplicate the object as an argument to the runtime call.
5628 frame_->Dup();
5629 Load(property->key());
5630 frame_->Push(Smi::FromInt(0));
5631 Load(property->value());
5632 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
5633 // Ignore the result.
5634 break;
5635 }
5636 default: UNREACHABLE();
5637 }
5638 }
5639}
5640
5641
Steve Blocka7e24c12009-10-30 11:49:00 +00005642void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005643 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005644 Comment cmnt(masm_, "[ ArrayLiteral");
5645
Leon Clarkee46be812010-01-19 14:06:41 +00005646 // Load a writable copy of the function of this activation into a
Steve Blocka7e24c12009-10-30 11:49:00 +00005647 // register.
5648 frame_->PushFunction();
5649 Result literals = frame_->Pop();
5650 literals.ToRegister();
5651 frame_->Spill(literals.reg());
5652
5653 // Load the literals array of the function.
5654 __ mov(literals.reg(),
5655 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
5656
Leon Clarkee46be812010-01-19 14:06:41 +00005657 frame_->Push(&literals);
5658 frame_->Push(Smi::FromInt(node->literal_index()));
5659 frame_->Push(node->constant_elements());
5660 int length = node->values()->length();
5661 Result clone;
Iain Merrick75681382010-08-19 15:07:18 +01005662 if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
5663 FastCloneShallowArrayStub stub(
5664 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
5665 clone = frame_->CallStub(&stub, 3);
5666 __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
5667 } else if (node->depth() > 1) {
Leon Clarkee46be812010-01-19 14:06:41 +00005668 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
Iain Merrick75681382010-08-19 15:07:18 +01005669 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00005670 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
5671 } else {
Iain Merrick75681382010-08-19 15:07:18 +01005672 FastCloneShallowArrayStub stub(
5673 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
Leon Clarkee46be812010-01-19 14:06:41 +00005674 clone = frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00005675 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005676 frame_->Push(&clone);
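
 // JS-level illustration of the stub choice above: for a shallow literal
 // such as
 //
 //   var a = [1, 2, 3];
 //
 // whose constant elements sit in a copy-on-write fixed array, the
 // COPY_ON_WRITE_ELEMENTS stub lets clones share that backing store; a
 // private copy is only made later if an element is written.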
5677
5678 // Generate code to set the elements in the array that are not
5679 // literals.
Leon Clarkee46be812010-01-19 14:06:41 +00005680 for (int i = 0; i < length; i++) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005681 Expression* value = node->values()->at(i);
5682
Iain Merrick75681382010-08-19 15:07:18 +01005683 if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) {
5684 continue;
5685 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005686
5687 // The property must be set by generated code.
5688 Load(value);
5689
5690 // Get the property value off the stack.
5691 Result prop_value = frame_->Pop();
5692 prop_value.ToRegister();
5693
5694 // Fetch the array literal while leaving a copy on the stack and
5695 // use it to get the elements array.
5696 frame_->Dup();
5697 Result elements = frame_->Pop();
5698 elements.ToRegister();
5699 frame_->Spill(elements.reg());
5700 // Get the elements array.
5701 __ mov(elements.reg(),
5702 FieldOperand(elements.reg(), JSObject::kElementsOffset));
5703
5704 // Write to the indexed properties array.
5705 int offset = i * kPointerSize + FixedArray::kHeaderSize;
5706 __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());
5707
5708 // Update the write barrier for the array address.
5709 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier.
5710 Result scratch = allocator_->Allocate();
5711 ASSERT(scratch.is_valid());
5712 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
5713 }
5714}
5715
5716
5717void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005718 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005719 ASSERT(!in_spilled_code());
5720 // Call runtime routine to allocate the catch extension object and
5721 // assign the exception value to the catch variable.
5722 Comment cmnt(masm_, "[ CatchExtensionObject");
5723 Load(node->key());
5724 Load(node->value());
5725 Result result =
5726 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
5727 frame_->Push(&result);
5728}
5729
5730
Andrei Popescu402d9372010-02-26 13:31:12 +00005731void CodeGenerator::EmitSlotAssignment(Assignment* node) {
5732#ifdef DEBUG
5733 int original_height = frame()->height();
5734#endif
5735 Comment cmnt(masm(), "[ Variable Assignment");
5736 Variable* var = node->target()->AsVariableProxy()->AsVariable();
5737 ASSERT(var != NULL);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005738 Slot* slot = var->AsSlot();
Andrei Popescu402d9372010-02-26 13:31:12 +00005739 ASSERT(slot != NULL);
5740
5741 // Evaluate the right-hand side.
5742 if (node->is_compound()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005743 // For a compound assignment the right-hand side is a binary operation
5744 // between the current property value and the actual right-hand side.
Leon Clarkef7060e22010-06-03 12:02:55 +01005745 LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Andrei Popescu402d9372010-02-26 13:31:12 +00005746 Load(node->value());
5747
Steve Block6ded16b2010-05-10 14:33:55 +01005748 // Perform the binary operation.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005749 bool overwrite_value = node->value()->ResultOverwriteAllowed();
Steve Block6ded16b2010-05-10 14:33:55 +01005750 // Construct the implicit binary operation.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005751 BinaryOperation expr(node);
Steve Block6ded16b2010-05-10 14:33:55 +01005752 GenericBinaryOperation(&expr,
Andrei Popescu402d9372010-02-26 13:31:12 +00005753 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
5754 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01005755 // For non-compound assignment just load the right-hand side.
Andrei Popescu402d9372010-02-26 13:31:12 +00005756 Load(node->value());
5757 }
5758
5759 // Perform the assignment.
5760 if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
5761 CodeForSourcePosition(node->position());
5762 StoreToSlot(slot,
5763 node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
5764 }
5765 ASSERT(frame()->height() == original_height + 1);
5766}
5767
5768
5769void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
5770#ifdef DEBUG
5771 int original_height = frame()->height();
5772#endif
5773 Comment cmnt(masm(), "[ Named Property Assignment");
5774 Variable* var = node->target()->AsVariableProxy()->AsVariable();
5775 Property* prop = node->target()->AsProperty();
5776 ASSERT(var == NULL || (prop == NULL && var->is_global()));
5777
Steve Block6ded16b2010-05-10 14:33:55 +01005778 // Initialize name and evaluate the receiver sub-expression if necessary. If
5779 // the receiver is trivial it is not placed on the stack at this point, but
5780 // loaded whenever actually needed.
Andrei Popescu402d9372010-02-26 13:31:12 +00005781 Handle<String> name;
5782 bool is_trivial_receiver = false;
5783 if (var != NULL) {
5784 name = var->name();
5785 } else {
5786 Literal* lit = prop->key()->AsLiteral();
5787 ASSERT_NOT_NULL(lit);
5788 name = Handle<String>::cast(lit->handle());
5789 // Do not materialize the receiver on the frame if it is trivial.
5790 is_trivial_receiver = prop->obj()->IsTrivial();
5791 if (!is_trivial_receiver) Load(prop->obj());
5792 }
5793
Steve Block6ded16b2010-05-10 14:33:55 +01005794 // Change to slow case in the beginning of an initialization block to
5795 // avoid the quadratic behavior of repeatedly adding fast properties.
Andrei Popescu402d9372010-02-26 13:31:12 +00005796 if (node->starts_initialization_block()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005797 // An initialization block consists of assignments of the form
5798 // expr.x = ..., so this will never be an assignment to a variable and
5799 // there must be a receiver object.
Andrei Popescu402d9372010-02-26 13:31:12 +00005800 ASSERT_EQ(NULL, var);
Andrei Popescu402d9372010-02-26 13:31:12 +00005801 if (is_trivial_receiver) {
5802 frame()->Push(prop->obj());
5803 } else {
5804 frame()->Dup();
5805 }
5806 Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
5807 }
5808
Steve Block6ded16b2010-05-10 14:33:55 +01005809 // Change to fast case at the end of an initialization block. To prepare for
5810 // that add an extra copy of the receiver to the frame, so that it can be
5811 // converted back to fast case after the assignment.
Andrei Popescu402d9372010-02-26 13:31:12 +00005812 if (node->ends_initialization_block() && !is_trivial_receiver) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005813 frame()->Dup();
5814 }
5815
Steve Block6ded16b2010-05-10 14:33:55 +01005816 // Stack layout:
5817 // [tos] : receiver (only materialized if non-trivial)
5818 // [tos+1] : receiver if at the end of an initialization block
5819
Andrei Popescu402d9372010-02-26 13:31:12 +00005820 // Evaluate the right-hand side.
5821 if (node->is_compound()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005822 // For a compound assignment the right-hand side is a binary operation
5823 // between the current property value and the actual right-hand side.
Andrei Popescu402d9372010-02-26 13:31:12 +00005824 if (is_trivial_receiver) {
5825 frame()->Push(prop->obj());
5826 } else if (var != NULL) {
5827 // The LoadIC stub expects the object in eax.
5828 // Freeing eax causes the code generator to load the global into it.
5829 frame_->Spill(eax);
5830 LoadGlobal();
5831 } else {
5832 frame()->Dup();
5833 }
5834 Result value = EmitNamedLoad(name, var != NULL);
5835 frame()->Push(&value);
5836 Load(node->value());
5837
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005838 bool overwrite_value = node->value()->ResultOverwriteAllowed();
Steve Block6ded16b2010-05-10 14:33:55 +01005839 // Construct the implicit binary operation.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005840 BinaryOperation expr(node);
Steve Block6ded16b2010-05-10 14:33:55 +01005841 GenericBinaryOperation(&expr,
Andrei Popescu402d9372010-02-26 13:31:12 +00005842 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
5843 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01005844 // For non-compound assignment just load the right-hand side.
Andrei Popescu402d9372010-02-26 13:31:12 +00005845 Load(node->value());
5846 }
5847
Steve Block6ded16b2010-05-10 14:33:55 +01005848 // Stack layout:
5849 // [tos] : value
5850 // [tos+1] : receiver (only materialized if non-trivial)
5851 // [tos+2] : receiver if at the end of an initialization block
5852
Andrei Popescu402d9372010-02-26 13:31:12 +00005853 // Perform the assignment. It is safe to ignore constants here.
5854 ASSERT(var == NULL || var->mode() != Variable::CONST);
5855 ASSERT_NE(Token::INIT_CONST, node->op());
5856 if (is_trivial_receiver) {
5857 Result value = frame()->Pop();
5858 frame()->Push(prop->obj());
5859 frame()->Push(&value);
5860 }
5861 CodeForSourcePosition(node->position());
5862 bool is_contextual = (var != NULL);
5863 Result answer = EmitNamedStore(name, is_contextual);
5864 frame()->Push(&answer);
5865
Steve Block6ded16b2010-05-10 14:33:55 +01005866 // Stack layout:
5867 // [tos] : result
5868 // [tos+1] : receiver if at the end of an initialization block
5869
Andrei Popescu402d9372010-02-26 13:31:12 +00005870 if (node->ends_initialization_block()) {
5871 ASSERT_EQ(NULL, var);
5872 // The argument to the runtime call is the receiver.
5873 if (is_trivial_receiver) {
5874 frame()->Push(prop->obj());
5875 } else {
5876 // A copy of the receiver is below the value of the assignment. Swap
5877 // the receiver and the value of the assignment expression.
5878 Result result = frame()->Pop();
5879 Result receiver = frame()->Pop();
5880 frame()->Push(&result);
5881 frame()->Push(&receiver);
5882 }
5883 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
5884 }
5885
Steve Block6ded16b2010-05-10 14:33:55 +01005886 // Stack layout:
5887 // [tos] : result
5888
Andrei Popescu402d9372010-02-26 13:31:12 +00005889 ASSERT_EQ(frame()->height(), original_height + 1);
5890}
5891
5892
5893void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
5894#ifdef DEBUG
5895 int original_height = frame()->height();
5896#endif
Steve Block6ded16b2010-05-10 14:33:55 +01005897 Comment cmnt(masm_, "[ Keyed Property Assignment");
Andrei Popescu402d9372010-02-26 13:31:12 +00005898 Property* prop = node->target()->AsProperty();
5899 ASSERT_NOT_NULL(prop);
5900
5901 // Evaluate the receiver subexpression.
5902 Load(prop->obj());
5903
Steve Block6ded16b2010-05-10 14:33:55 +01005904 // Change to slow case in the beginning of an initialization block to
5905 // avoid the quadratic behavior of repeatedly adding fast properties.
Andrei Popescu402d9372010-02-26 13:31:12 +00005906 if (node->starts_initialization_block()) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005907 frame_->Dup();
5908 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
5909 }
5910
Steve Block6ded16b2010-05-10 14:33:55 +01005911 // Change to fast case at the end of an initialization block. To prepare for
5912 // that add an extra copy of the receiver to the frame, so that it can be
5913 // converted back to fast case after the assignment.
Andrei Popescu402d9372010-02-26 13:31:12 +00005914 if (node->ends_initialization_block()) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005915 frame_->Dup();
5916 }
5917
5918 // Evaluate the key subexpression.
5919 Load(prop->key());
5920
Steve Block6ded16b2010-05-10 14:33:55 +01005921 // Stack layout:
5922 // [tos] : key
5923 // [tos+1] : receiver
5924 // [tos+2] : receiver if at the end of an initialization block
5925
Andrei Popescu402d9372010-02-26 13:31:12 +00005926 // Evaluate the right-hand side.
5927 if (node->is_compound()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005928 // For a compound assignment the right-hand side is a binary operation
5929 // between the current property value and the actual right-hand side.
5930 // Duplicate receiver and key for loading the current property value.
Andrei Popescu402d9372010-02-26 13:31:12 +00005931 frame()->PushElementAt(1);
5932 frame()->PushElementAt(1);
5933 Result value = EmitKeyedLoad();
5934 frame()->Push(&value);
5935 Load(node->value());
5936
Steve Block6ded16b2010-05-10 14:33:55 +01005937 // Perform the binary operation.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005938 bool overwrite_value = node->value()->ResultOverwriteAllowed();
5939 BinaryOperation expr(node);
Steve Block6ded16b2010-05-10 14:33:55 +01005940 GenericBinaryOperation(&expr,
Andrei Popescu402d9372010-02-26 13:31:12 +00005941 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
5942 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01005943 // For non-compound assignment just load the right-hand side.
Andrei Popescu402d9372010-02-26 13:31:12 +00005944 Load(node->value());
5945 }
5946
Steve Block6ded16b2010-05-10 14:33:55 +01005947 // Stack layout:
5948 // [tos] : value
5949 // [tos+1] : key
5950 // [tos+2] : receiver
5951 // [tos+3] : receiver if at the end of an initialization block
5952
Andrei Popescu402d9372010-02-26 13:31:12 +00005953 // Perform the assignment. It is safe to ignore constants here.
5954 ASSERT(node->op() != Token::INIT_CONST);
5955 CodeForSourcePosition(node->position());
5956 Result answer = EmitKeyedStore(prop->key()->type());
5957 frame()->Push(&answer);
5958
Steve Block6ded16b2010-05-10 14:33:55 +01005959 // Stack layout:
5960 // [tos] : result
5961 // [tos+1] : receiver if at the end of an initialization block
5962
5963 // Change to fast case at the end of an initialization block.
Andrei Popescu402d9372010-02-26 13:31:12 +00005964 if (node->ends_initialization_block()) {
5965 // The argument to the runtime call is the extra copy of the receiver,
5966 // which is below the value of the assignment. Swap the receiver and
5967 // the value of the assignment expression.
5968 Result result = frame()->Pop();
5969 Result receiver = frame()->Pop();
5970 frame()->Push(&result);
5971 frame()->Push(&receiver);
5972 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
5973 }
5974
Steve Block6ded16b2010-05-10 14:33:55 +01005975 // Stack layout:
5976 // [tos] : result
5977
Andrei Popescu402d9372010-02-26 13:31:12 +00005978 ASSERT(frame()->height() == original_height + 1);
5979}
5980
5981
Steve Blocka7e24c12009-10-30 11:49:00 +00005982void CodeGenerator::VisitAssignment(Assignment* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005983 ASSERT(!in_safe_int32_mode());
Leon Clarked91b9f72010-01-27 17:25:45 +00005984#ifdef DEBUG
Andrei Popescu402d9372010-02-26 13:31:12 +00005985 int original_height = frame()->height();
Leon Clarked91b9f72010-01-27 17:25:45 +00005986#endif
Andrei Popescu402d9372010-02-26 13:31:12 +00005987 Variable* var = node->target()->AsVariableProxy()->AsVariable();
5988 Property* prop = node->target()->AsProperty();
Steve Blocka7e24c12009-10-30 11:49:00 +00005989
Andrei Popescu402d9372010-02-26 13:31:12 +00005990 if (var != NULL && !var->is_global()) {
5991 EmitSlotAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00005992
Andrei Popescu402d9372010-02-26 13:31:12 +00005993 } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
5994 (var != NULL && var->is_global())) {
5995 // Properties whose keys are property names and global variables are
5996 // treated as named property references. We do not need to consider
5997 // global 'this' because it is not a valid left-hand side.
5998 EmitNamedPropertyAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00005999
Andrei Popescu402d9372010-02-26 13:31:12 +00006000 } else if (prop != NULL) {
6001 // Other properties (including rewritten parameters for a function that
6002 // uses arguments) are keyed property assignments.
6003 EmitKeyedPropertyAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00006004
Andrei Popescu402d9372010-02-26 13:31:12 +00006005 } else {
6006 // Invalid left-hand side.
6007 Load(node->target());
6008 Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
6009 // The runtime call doesn't actually return but the code generator will
6010 // still generate code and expects a certain frame height.
6011 frame()->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006012 }
Andrei Popescu402d9372010-02-26 13:31:12 +00006013
6014 ASSERT(frame()->height() == original_height + 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00006015}
6016
6017
6018void CodeGenerator::VisitThrow(Throw* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01006019 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00006020 Comment cmnt(masm_, "[ Throw");
6021 Load(node->exception());
6022 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
6023 frame_->Push(&result);
6024}
6025
6026
6027void CodeGenerator::VisitProperty(Property* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01006028 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00006029 Comment cmnt(masm_, "[ Property");
6030 Reference property(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00006031 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006032}
6033
6034
6035void CodeGenerator::VisitCall(Call* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01006036 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00006037 Comment cmnt(masm_, "[ Call");
6038
6039 Expression* function = node->expression();
6040 ZoneList<Expression*>* args = node->arguments();
6041
6042 // Check if the function is a variable or a property.
6043 Variable* var = function->AsVariableProxy()->AsVariable();
6044 Property* property = function->AsProperty();
6045
6046 // ------------------------------------------------------------------------
6047 // Fast-case: Use inline caching.
6048 // ---
6049 // According to ECMA-262, section 11.2.3, page 44, the function to call
6050 // must be resolved after the arguments have been evaluated. The IC code
6051 // automatically handles this by loading the arguments before the function
6052 // is resolved in cache misses (this also holds for megamorphic calls).
6053 // ------------------------------------------------------------------------
6054
6055 if (var != NULL && var->is_possibly_eval()) {
6056 // ----------------------------------
6057 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
6058 // ----------------------------------
6059
6060 // In a call to eval, we first call %ResolvePossiblyDirectEval to
6061 // resolve the function we need to call and the receiver of the
6062 // call. Then we call the resolved function using the given
6063 // arguments.
6064
6065 // Prepare the stack for the call to the resolved function.
6066 Load(function);
6067
6068 // Allocate a frame slot for the receiver.
6069 frame_->Push(Factory::undefined_value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006070
6071 // Load the arguments.
Steve Blocka7e24c12009-10-30 11:49:00 +00006072 int arg_count = args->length();
6073 for (int i = 0; i < arg_count; i++) {
6074 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01006075 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006076 }
6077
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006078 // Result to hold the result of the function resolution and the
6079 // final result of the eval call.
6080 Result result;
6081
6082 // If we know that eval can only be shadowed by eval-introduced
6083 // variables we attempt to load the global eval function directly
6084 // in generated code. If we succeed, there is no need to perform a
6085 // context lookup in the runtime system.
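
 // Illustrative case for the fast path, assuming no lexical shadowing:
 //
 //   function f(src) { return eval(src); }  // 'eval' not shadowed
 //
 // Here 'eval' can only be rebound by an eval-introduced binding, so the
 // variable is in DYNAMIC_GLOBAL mode and the generated code may load the
 // global eval function directly, taking the runtime lookup only on the
 // slow path.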
6086 JumpTarget done;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01006087 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
6088 ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006089 JumpTarget slow;
6090 // Prepare the stack for the call to
6091 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
6092 // function, the first argument to the eval call and the
6093 // receiver.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01006094 Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006095 NOT_INSIDE_TYPEOF,
6096 &slow);
6097 frame_->Push(&fun);
6098 if (arg_count > 0) {
6099 frame_->PushElementAt(arg_count);
6100 } else {
6101 frame_->Push(Factory::undefined_value());
6102 }
6103 frame_->PushParameterAt(-1);
6104
6105 // Resolve the call.
6106 result =
6107 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);
6108
6109 done.Jump(&result);
6110 slow.Bind();
6111 }
6112
6113 // Prepare the stack for the call to ResolvePossiblyDirectEval by
6114 // pushing the loaded function, the first argument to the eval
6115 // call and the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00006116 frame_->PushElementAt(arg_count + 1);
6117 if (arg_count > 0) {
6118 frame_->PushElementAt(arg_count);
6119 } else {
6120 frame_->Push(Factory::undefined_value());
6121 }
Leon Clarkee46be812010-01-19 14:06:41 +00006122 frame_->PushParameterAt(-1);
6123
Steve Blocka7e24c12009-10-30 11:49:00 +00006124 // Resolve the call.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006125 result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
6126
6127 // If we generated fast-case code bind the jump-target where fast
6128 // and slow case merge.
6129 if (done.is_linked()) done.Bind(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006130
Leon Clarkee46be812010-01-19 14:06:41 +00006131 // The runtime call returns a pair of values in eax (function) and
6132 // edx (receiver). Touch up the stack with the right values.
6133 Result receiver = allocator_->Allocate(edx);
6134 frame_->SetElementAt(arg_count + 1, &result);
6135 frame_->SetElementAt(arg_count, &receiver);
6136 receiver.Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00006137
6138 // Call the function.
6139 CodeForSourcePosition(node->position());
6140 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00006141 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006142 result = frame_->CallStub(&call_function, arg_count + 1);
6143
6144 // Restore the context and overwrite the function on the stack with
6145 // the result.
6146 frame_->RestoreContextRegister();
6147 frame_->SetElementAt(0, &result);
6148
6149 } else if (var != NULL && !var->is_this() && var->is_global()) {
6150 // ----------------------------------
6151 // JavaScript example: 'foo(1, 2, 3)' // foo is global
6152 // ----------------------------------
6153
Steve Blocka7e24c12009-10-30 11:49:00 +00006154 // Pass the global object as the receiver and let the IC stub
6155 // patch the stack to use the global proxy as 'this' in the
6156 // invoked function.
6157 LoadGlobal();
6158
6159 // Load the arguments.
6160 int arg_count = args->length();
6161 for (int i = 0; i < arg_count; i++) {
6162 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01006163 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006164 }
6165
Leon Clarkee46be812010-01-19 14:06:41 +00006166 // Push the name of the function onto the frame.
6167 frame_->Push(var->name());
6168
Steve Blocka7e24c12009-10-30 11:49:00 +00006169 // Call the IC initialization code.
6170 CodeForSourcePosition(node->position());
6171 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
6172 arg_count,
6173 loop_nesting());
6174 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00006175 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006176
Kristian Monsen0d5e1162010-09-30 15:31:59 +01006177 } else if (var != NULL && var->AsSlot() != NULL &&
6178 var->AsSlot()->type() == Slot::LOOKUP) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006179 // ----------------------------------
Kristian Monsen25f61362010-05-21 11:50:48 +01006180 // JavaScript examples:
6181 //
6182 // with (obj) foo(1, 2, 3) // foo may be in obj.
6183 //
6184 // function f() {};
6185 // function g() {
6186 // eval(...);
6187 // f(); // f could be in extension object.
6188 // }
Steve Blocka7e24c12009-10-30 11:49:00 +00006189 // ----------------------------------
6190
Kristian Monsen25f61362010-05-21 11:50:48 +01006191 JumpTarget slow, done;
6192 Result function;
6193
6194 // Generate fast case for loading functions from slots that
6195 // correspond to local/global variables or arguments unless they
6196 // are shadowed by eval-introduced bindings.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01006197 EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01006198 NOT_INSIDE_TYPEOF,
6199 &function,
6200 &slow,
6201 &done);
6202
6203 slow.Bind();
6204 // Enter the runtime system to load the function from the context.
6205 // Sync the frame so we can push the arguments directly into
6206 // place.
Steve Blocka7e24c12009-10-30 11:49:00 +00006207 frame_->SyncRange(0, frame_->element_count() - 1);
6208 frame_->EmitPush(esi);
6209 frame_->EmitPush(Immediate(var->name()));
6210 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
6211 // The runtime call returns a pair of values in eax and edx. The
6212 // looked-up function is in eax and the receiver is in edx. These
6213 // register references are not ref counted here. We spill them
6214 // eagerly since they are arguments to an inevitable call (and so
6215 // cannot be shared with other frame elements).
6216 ASSERT(!allocator()->is_used(eax));
6217 frame_->EmitPush(eax);
6218
6219 // Load the receiver.
6220 ASSERT(!allocator()->is_used(edx));
6221 frame_->EmitPush(edx);
6222
Kristian Monsen25f61362010-05-21 11:50:48 +01006223 // If fast case code has been generated, emit code to push the
6224 // function and receiver and have the slow path jump around this
6225 // code.
6226 if (done.is_linked()) {
6227 JumpTarget call;
6228 call.Jump();
6229 done.Bind(&function);
6230 frame_->Push(&function);
6231 LoadGlobalReceiver();
6232 call.Bind();
6233 }
6234
Steve Blocka7e24c12009-10-30 11:49:00 +00006235 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00006236 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00006237
6238 } else if (property != NULL) {
6239 // Check if the key is a literal string.
6240 Literal* literal = property->key()->AsLiteral();
6241
6242 if (literal != NULL && literal->handle()->IsSymbol()) {
6243 // ------------------------------------------------------------------
6244 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
6245 // ------------------------------------------------------------------
6246
6247 Handle<String> name = Handle<String>::cast(literal->handle());
6248
6249 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
6250 name->IsEqualTo(CStrVector("apply")) &&
6251 args->length() == 2 &&
6252 args->at(1)->AsVariableProxy() != NULL &&
6253 args->at(1)->AsVariableProxy()->IsArguments()) {
6254 // Use the optimized Function.prototype.apply that avoids
6255 // allocating lazily allocated arguments objects.
Leon Clarked91b9f72010-01-27 17:25:45 +00006256 CallApplyLazy(property->obj(),
Steve Blocka7e24c12009-10-30 11:49:00 +00006257 args->at(0),
6258 args->at(1)->AsVariableProxy(),
6259 node->position());
6260
6261 } else {
Leon Clarkee46be812010-01-19 14:06:41 +00006262 // Push the receiver onto the frame.
Steve Blocka7e24c12009-10-30 11:49:00 +00006263 Load(property->obj());
6264
6265 // Load the arguments.
6266 int arg_count = args->length();
6267 for (int i = 0; i < arg_count; i++) {
6268 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01006269 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006270 }
6271
Leon Clarkee46be812010-01-19 14:06:41 +00006272 // Push the name of the function onto the frame.
6273 frame_->Push(name);
6274
Steve Blocka7e24c12009-10-30 11:49:00 +00006275 // Call the IC initialization code.
6276 CodeForSourcePosition(node->position());
6277 Result result =
6278 frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
6279 loop_nesting());
6280 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00006281 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006282 }
6283
6284 } else {
6285 // -------------------------------------------
6286 // JavaScript example: 'array[index](1, 2, 3)'
6287 // -------------------------------------------
6288
6289 // Load the function to call from the property through a reference.
Steve Blocka7e24c12009-10-30 11:49:00 +00006290
6291 // Pass receiver to called function.
6292 if (property->is_synthetic()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00006293 Reference ref(this, property);
6294 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006295 // Use global object as receiver.
6296 LoadGlobalReceiver();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006297 // Call the function.
6298 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00006299 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006300 // Push the receiver onto the frame.
Leon Clarked91b9f72010-01-27 17:25:45 +00006301 Load(property->obj());
Steve Blocka7e24c12009-10-30 11:49:00 +00006302
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006303 // Load the name of the function.
6304 Load(property->key());
6305
6306 // Swap the name of the function and the receiver on the stack to follow
6307 // the calling convention for call ICs.
6308 Result key = frame_->Pop();
6309 Result receiver = frame_->Pop();
6310 frame_->Push(&key);
6311 frame_->Push(&receiver);
6312 key.Unuse();
6313 receiver.Unuse();
6314
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006315 // Load the arguments.
6316 int arg_count = args->length();
6317 for (int i = 0; i < arg_count; i++) {
6318 Load(args->at(i));
6319 frame_->SpillTop();
6320 }
6321
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006322 // Place the key on top of stack and call the IC initialization code.
6323 frame_->PushElementAt(arg_count + 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006324 CodeForSourcePosition(node->position());
6325 Result result =
6326 frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET,
6327 arg_count,
6328 loop_nesting());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006329 frame_->Drop(); // Drop the key still on the stack.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006330 frame_->RestoreContextRegister();
6331 frame_->Push(&result);
6332 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006333 }
6334
6335 } else {
6336 // ----------------------------------
6337 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
6338 // ----------------------------------
6339
6340 // Load the function.
6341 Load(function);
6342
6343 // Pass the global proxy as the receiver.
6344 LoadGlobalReceiver();
6345
6346 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00006347 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00006348 }
6349}
6350
6351
6352void CodeGenerator::VisitCallNew(CallNew* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01006353 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00006354 Comment cmnt(masm_, "[ CallNew");
6355
6356 // According to ECMA-262, section 11.2.2, page 44, the function
6357 // expression in new calls must be evaluated before the
6358 // arguments. This is different from ordinary calls, where the
6359 // actual function to call is resolved after the arguments have been
6360 // evaluated.
6361
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006362 // Push the constructor on the stack. If it is not a function, it is
6363 // used as the receiver for CALL_NON_FUNCTION; otherwise the value on
6364 // the stack is ignored.
Steve Blocka7e24c12009-10-30 11:49:00 +00006365 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00006366
6367 // Push the arguments ("left-to-right") on the stack.
6368 ZoneList<Expression*>* args = node->arguments();
6369 int arg_count = args->length();
6370 for (int i = 0; i < arg_count; i++) {
6371 Load(args->at(i));
6372 }
6373
6374 // Call the construct call builtin that handles allocation and
6375 // constructor invocation.
6376 CodeForSourcePosition(node->position());
6377 Result result = frame_->CallConstructor(arg_count);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006378 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006379}
6380
6381
6382void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
6383 ASSERT(args->length() == 1);
6384 Load(args->at(0));
6385 Result value = frame_->Pop();
6386 value.ToRegister();
6387 ASSERT(value.is_valid());
6388 __ test(value.reg(), Immediate(kSmiTagMask));
6389 value.Unuse();
6390 destination()->Split(zero);
6391}
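
// Worked example for the test above, assuming the ia32 smi encoding
// (value shifted left by one, tag bit 0):
//
//   Smi::FromInt(5) -> 0b1010: 'test reg, kSmiTagMask' sets ZF -> smi
//   heap object pointer -> low bit 1 (kHeapObjectTag): ZF clear -> not smi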
6392
6393
6394void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
6395 // Conditionally generate a log call.
6396 // Args:
6397 // 0 (literal string): The type of logging (corresponds to the flags).
6398 // This is used to determine whether or not to generate the log call.
6399 // 1 (string): Format string. Access the string at argument index 2
6400 // with '%2s' (see Logger::LogRuntime for all the formats).
6401 // 2 (array): Arguments to the format string.
6402 ASSERT_EQ(args->length(), 3);
6403#ifdef ENABLE_LOGGING_AND_PROFILING
6404 if (ShouldGenerateLog(args->at(0))) {
6405 Load(args->at(1));
6406 Load(args->at(2));
6407 frame_->CallRuntime(Runtime::kLog, 2);
6408 }
6409#endif
6410 // Finally, we're expected to leave a value on the top of the stack.
6411 frame_->Push(Factory::undefined_value());
6412}
6413
6414
6415void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
6416 ASSERT(args->length() == 1);
6417 Load(args->at(0));
6418 Result value = frame_->Pop();
6419 value.ToRegister();
6420 ASSERT(value.is_valid());
Steve Block6ded16b2010-05-10 14:33:55 +01006421 __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006422 value.Unuse();
6423 destination()->Split(zero);
6424}
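
// Worked example for the combined mask above: kSmiTagMask | kSmiSignMask
// checks the low tag bit and the high sign bit in a single test, so the
// zero flag is only set for non-negative smis:
//
//   Smi::FromInt(3)  -> 0b0110, both masked bits clear -> true
//   Smi::FromInt(-3) -> sign bit set                   -> false
//   heap object pointer -> tag bit set                 -> false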
6425
6426
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006427class DeferredStringCharCodeAt : public DeferredCode {
6428 public:
6429 DeferredStringCharCodeAt(Register object,
6430 Register index,
6431 Register scratch,
6432 Register result)
6433 : result_(result),
6434 char_code_at_generator_(object,
6435 index,
6436 scratch,
6437 result,
6438 &need_conversion_,
6439 &need_conversion_,
6440 &index_out_of_range_,
6441 STRING_INDEX_IS_NUMBER) {}
6442
6443 StringCharCodeAtGenerator* fast_case_generator() {
6444 return &char_code_at_generator_;
6445 }
6446
6447 virtual void Generate() {
6448 VirtualFrameRuntimeCallHelper call_helper(frame_state());
6449 char_code_at_generator_.GenerateSlow(masm(), call_helper);
6450
6451 __ bind(&need_conversion_);
6452 // Move the undefined value into the result register, which will
6453 // trigger conversion.
6454 __ Set(result_, Immediate(Factory::undefined_value()));
6455 __ jmp(exit_label());
6456
6457 __ bind(&index_out_of_range_);
6458 // When the index is out of range, the spec requires us to return
6459 // NaN.
6460 __ Set(result_, Immediate(Factory::nan_value()));
6461 __ jmp(exit_label());
6462 }
6463
6464 private:
6465 Register result_;
6466
6467 Label need_conversion_;
6468 Label index_out_of_range_;
6469
6470 StringCharCodeAtGenerator char_code_at_generator_;
6471};
6472
6473
6474// This generates code that performs a String.prototype.charCodeAt() call
6475 // or returns undefined in order to trigger conversion.
6476void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
6477 Comment(masm_, "[ GenerateStringCharCodeAt");
Steve Blocka7e24c12009-10-30 11:49:00 +00006478 ASSERT(args->length() == 2);
6479
Steve Blocka7e24c12009-10-30 11:49:00 +00006480 Load(args->at(0));
6481 Load(args->at(1));
6482 Result index = frame_->Pop();
6483 Result object = frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006484 object.ToRegister();
6485 index.ToRegister();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006486 // We might mutate the object register.
Steve Blocka7e24c12009-10-30 11:49:00 +00006487 frame_->Spill(object.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00006488
Steve Block6ded16b2010-05-10 14:33:55 +01006489 // We need two extra registers.
6490 Result result = allocator()->Allocate();
6491 ASSERT(result.is_valid());
6492 Result scratch = allocator()->Allocate();
6493 ASSERT(scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00006494
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006495 DeferredStringCharCodeAt* deferred =
6496 new DeferredStringCharCodeAt(object.reg(),
6497 index.reg(),
6498 scratch.reg(),
6499 result.reg());
6500 deferred->fast_case_generator()->GenerateFast(masm_);
6501 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01006502 frame_->Push(&result);
6503}
6504
6505
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006506class DeferredStringCharFromCode : public DeferredCode {
6507 public:
6508 DeferredStringCharFromCode(Register code,
6509 Register result)
6510 : char_from_code_generator_(code, result) {}
6511
6512 StringCharFromCodeGenerator* fast_case_generator() {
6513 return &char_from_code_generator_;
6514 }
6515
6516 virtual void Generate() {
6517 VirtualFrameRuntimeCallHelper call_helper(frame_state());
6518 char_from_code_generator_.GenerateSlow(masm(), call_helper);
6519 }
6520
6521 private:
6522 StringCharFromCodeGenerator char_from_code_generator_;
6523};
6524
6525
6526// Generates code for creating a one-char string from a char code.
6527void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
6528 Comment(masm_, "[ GenerateStringCharFromCode");
Steve Block6ded16b2010-05-10 14:33:55 +01006529 ASSERT(args->length() == 1);
6530
6531 Load(args->at(0));
6532
6533 Result code = frame_->Pop();
6534 code.ToRegister();
6535 ASSERT(code.is_valid());
6536
Steve Block6ded16b2010-05-10 14:33:55 +01006537 Result result = allocator()->Allocate();
6538 ASSERT(result.is_valid());
6539
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006540 DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
6541 code.reg(), result.reg());
6542 deferred->fast_case_generator()->GenerateFast(masm_);
6543 deferred->BindExit();
6544 frame_->Push(&result);
6545}
6546
6547
6548class DeferredStringCharAt : public DeferredCode {
6549 public:
6550 DeferredStringCharAt(Register object,
6551 Register index,
6552 Register scratch1,
6553 Register scratch2,
6554 Register result)
6555 : result_(result),
6556 char_at_generator_(object,
6557 index,
6558 scratch1,
6559 scratch2,
6560 result,
6561 &need_conversion_,
6562 &need_conversion_,
6563 &index_out_of_range_,
6564 STRING_INDEX_IS_NUMBER) {}
6565
6566 StringCharAtGenerator* fast_case_generator() {
6567 return &char_at_generator_;
6568 }
6569
6570 virtual void Generate() {
6571 VirtualFrameRuntimeCallHelper call_helper(frame_state());
6572 char_at_generator_.GenerateSlow(masm(), call_helper);
6573
6574 __ bind(&need_conversion_);
6575 // Move smi zero into the result register, which will trigger
6576 // conversion.
6577 __ Set(result_, Immediate(Smi::FromInt(0)));
6578 __ jmp(exit_label());
6579
6580 __ bind(&index_out_of_range_);
6581 // When the index is out of range, the spec requires us to return
6582 // the empty string.
6583 __ Set(result_, Immediate(Factory::empty_string()));
6584 __ jmp(exit_label());
6585 }
6586
6587 private:
6588 Register result_;
6589
6590 Label need_conversion_;
6591 Label index_out_of_range_;
6592
6593 StringCharAtGenerator char_at_generator_;
6594};
6595
6596
6597// This generates code that performs a String.prototype.charAt() call
6598// or returns a smi in order to trigger conversion.
6599void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
6600 Comment(masm_, "[ GenerateStringCharAt");
6601 ASSERT(args->length() == 2);
6602
6603 Load(args->at(0));
6604 Load(args->at(1));
6605 Result index = frame_->Pop();
6606 Result object = frame_->Pop();
6607 object.ToRegister();
6608 index.ToRegister();
6609 // We might mutate the object register.
6610 frame_->Spill(object.reg());
6611
6612 // We need three extra registers.
6613 Result result = allocator()->Allocate();
6614 ASSERT(result.is_valid());
6615 Result scratch1 = allocator()->Allocate();
6616 ASSERT(scratch1.is_valid());
6617 Result scratch2 = allocator()->Allocate();
6618 ASSERT(scratch2.is_valid());
6619
6620 DeferredStringCharAt* deferred =
6621 new DeferredStringCharAt(object.reg(),
6622 index.reg(),
6623 scratch1.reg(),
6624 scratch2.reg(),
6625 result.reg());
6626 deferred->fast_case_generator()->GenerateFast(masm_);
6627 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01006628 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006629}
6630
6631
6632void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
6633 ASSERT(args->length() == 1);
6634 Load(args->at(0));
6635 Result value = frame_->Pop();
6636 value.ToRegister();
6637 ASSERT(value.is_valid());
6638 __ test(value.reg(), Immediate(kSmiTagMask));
6639 destination()->false_target()->Branch(equal);
6640 // It is a heap object - get map.
6641 Result temp = allocator()->Allocate();
6642 ASSERT(temp.is_valid());
6643 // Check if the object is a JS array or not.
6644 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
6645 value.Unuse();
6646 temp.Unuse();
6647 destination()->Split(equal);
6648}
6649
6650
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006651void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
6652 ASSERT(args->length() == 2);
6653 Load(args->at(1));
6654 Load(args->at(0));
6655 Result array_result = frame_->Pop();
6656 array_result.ToRegister(eax);
6657 frame_->SpillAll();
6658
6659 Label bailout;
6660 Label done;
6661 // All aliases of the same register have disjoint lifetimes.
6662 Register array = eax;
6663 Register result_pos = no_reg;
6664
6665 Register index = edi;
6666
6667 Register current_string_length = ecx; // Will be ecx when live.
6668
6669 Register current_string = edx;
6670
6671 Register scratch = ebx;
6672
6673 Register scratch_2 = esi;
6674 Register new_padding_chars = scratch_2;
6675
6676 Operand separator = Operand(esp, 4 * kPointerSize); // Already pushed.
6677 Operand elements = Operand(esp, 3 * kPointerSize);
6678 Operand result = Operand(esp, 2 * kPointerSize);
6679 Operand padding_chars = Operand(esp, 1 * kPointerSize);
6680 Operand array_length = Operand(esp, 0);
6681 __ sub(Operand(esp), Immediate(4 * kPointerSize));
6682
6683 // Check that eax is a JSArray.
6684 __ test(array, Immediate(kSmiTagMask));
6685 __ j(zero, &bailout);
6686 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
6687 __ j(not_equal, &bailout);
6688
6689 // Check that the array has fast elements.
6690 __ test_b(FieldOperand(scratch, Map::kBitField2Offset),
6691 1 << Map::kHasFastElements);
6692 __ j(zero, &bailout);
6693
6694 // If the array is empty, return the empty string.
6695 __ mov(scratch, FieldOperand(array, JSArray::kLengthOffset));
6696 __ sar(scratch, 1);
6697 Label non_trivial;
6698 __ j(not_zero, &non_trivial);
6699 __ mov(result, Factory::empty_string());
6700 __ jmp(&done);
6701
6702 __ bind(&non_trivial);
6703 __ mov(array_length, scratch);
6704
6705 __ mov(scratch, FieldOperand(array, JSArray::kElementsOffset));
6706 __ mov(elements, scratch);
6707
6708 // End of array's live range.
6709 result_pos = array;
6710 array = no_reg;
6711
6712
6713 // Check that the separator is a flat ascii string.
6714 __ mov(current_string, separator);
6715 __ test(current_string, Immediate(kSmiTagMask));
6716 __ j(zero, &bailout);
6717 __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset));
6718 __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
6719 __ and_(scratch, Immediate(
6720 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
6721 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
6722 __ j(not_equal, &bailout);
6723 // If the separator is the empty string, replace it with NULL.
6724 // Testing for NULL inside the loop is quicker than the empty string test.
6725 __ cmp(FieldOperand(current_string, SeqAsciiString::kLengthOffset),
6726 Immediate(0));
6727 Label separator_checked;
6728 __ j(not_zero, &separator_checked);
6729 __ mov(separator, Immediate(0));
6730 __ bind(&separator_checked);
6731
6732 // Check that elements[0] is a flat ascii string, and copy it into new space.
6733 __ mov(scratch, elements);
6734 __ mov(current_string, FieldOperand(scratch, FixedArray::kHeaderSize));
6735 __ test(current_string, Immediate(kSmiTagMask));
6736 __ j(zero, &bailout);
6737 __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset));
6738 __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
6739 __ and_(scratch, Immediate(
6740 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
6741 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
6742 __ j(not_equal, &bailout);
6743
6744 // Allocate space to copy it. Round up the size to the alignment granularity.
6745 __ mov(current_string_length,
6746 FieldOperand(current_string, String::kLengthOffset));
6747 __ shr(current_string_length, 1);
6748
6749 // Live registers and stack values:
6750 // current_string_length: length of elements[0].
6751
6752 // Allocate the result string in new space, initialized to elements[0].
6753 __ AllocateAsciiString(result_pos, current_string_length, scratch_2,
6754 index, no_reg, &bailout);
6755 __ mov(result, result_pos);
6756
6757 // Adjust current_string_length to include padding bytes at end of string.
6758 // Keep track of the number of padding bytes.
6759 __ mov(new_padding_chars, current_string_length);
6760 __ add(Operand(current_string_length), Immediate(kObjectAlignmentMask));
6761 __ and_(Operand(current_string_length), Immediate(~kObjectAlignmentMask));
6762 __ sub(new_padding_chars, Operand(current_string_length));
6763 __ neg(new_padding_chars);
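  // new_padding_chars now holds the aligned length minus the original
  // length, i.e. the number of padding bytes at the end of the string.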
6764 __ mov(padding_chars, new_padding_chars);
6765
6766 Label copy_loop_1_done;
6767 Label copy_loop_1;
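  // Copy the string one word at a time, walking backwards from the
  // aligned end; object-alignment rounding makes the final partial word
  // safe to read and write.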
6768 __ test(current_string_length, Operand(current_string_length));
6769 __ j(zero, &copy_loop_1_done);
6770 __ bind(&copy_loop_1);
6771 __ sub(Operand(current_string_length), Immediate(kPointerSize));
6772 __ mov(scratch, FieldOperand(current_string, current_string_length,
6773 times_1, SeqAsciiString::kHeaderSize));
6774 __ mov(FieldOperand(result_pos, current_string_length,
6775 times_1, SeqAsciiString::kHeaderSize),
6776 scratch);
6777 __ j(not_zero, &copy_loop_1);
6778 __ bind(&copy_loop_1_done);
6779
6780 __ mov(index, Immediate(1));
6781 // Loop condition: while (index < length).
6782 Label loop;
6783 __ bind(&loop);
6784 __ cmp(index, array_length);
6785 __ j(greater_equal, &done);
6786
6787 // If the separator is the empty string, signalled by NULL, skip it.
6788 Label separator_done;
6789 __ mov(current_string, separator);
6790 __ test(current_string, Operand(current_string));
6791 __ j(zero, &separator_done);
6792
6793 // Append separator to result. It is known to be a flat ascii string.
6794 __ AppendStringToTopOfNewSpace(current_string, current_string_length,
6795 result_pos, scratch, scratch_2, result,
6796 padding_chars, &bailout);
6797 __ bind(&separator_done);
6798
6799 // Add next element of array to the end of the result.
6800 // Get current_string = array[index].
6801 __ mov(scratch, elements);
6802 __ mov(current_string, FieldOperand(scratch, index,
6803 times_pointer_size,
6804 FixedArray::kHeaderSize));
6805 // If current is not a flat ascii string, drop the result and return undefined.
6806 __ test(current_string, Immediate(kSmiTagMask));
6807 __ j(zero, &bailout);
6808 __ mov(scratch, FieldOperand(current_string, HeapObject::kMapOffset));
6809 __ mov_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
6810 __ and_(scratch, Immediate(
6811 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
6812 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
6813 __ j(not_equal, &bailout);
6814
6815 // Append current to the result.
6816 __ AppendStringToTopOfNewSpace(current_string, current_string_length,
6817 result_pos, scratch, scratch_2, result,
6818 padding_chars, &bailout);
6819 __ add(Operand(index), Immediate(1));
6820 __ jmp(&loop); // End while (index < length).
6821
6822 __ bind(&bailout);
6823 __ mov(result, Factory::undefined_value());
6824 __ bind(&done);
6825 __ mov(eax, result);
6826 // Drop temp values from the stack, and restore context register.
6827 __ add(Operand(esp), Immediate(4 * kPointerSize));
6828
6829 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
6830 frame_->Drop(1);
6831 frame_->Push(&array_result);
6832}
6833
6834
Andrei Popescu402d9372010-02-26 13:31:12 +00006835void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
6836 ASSERT(args->length() == 1);
6837 Load(args->at(0));
6838 Result value = frame_->Pop();
6839 value.ToRegister();
6840 ASSERT(value.is_valid());
6841 __ test(value.reg(), Immediate(kSmiTagMask));
6842 destination()->false_target()->Branch(equal);
6843 // It is a heap object - get map.
6844 Result temp = allocator()->Allocate();
6845 ASSERT(temp.is_valid());
6846 // Check if the object is a regexp.
6847 __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
6848 value.Unuse();
6849 temp.Unuse();
6850 destination()->Split(equal);
6851}
6852
6853
Steve Blockd0582a62009-12-15 09:54:21 +00006854void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
6855 // This generates a fast version of:
6856 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
6857 ASSERT(args->length() == 1);
6858 Load(args->at(0));
6859 Result obj = frame_->Pop();
6860 obj.ToRegister();
6861
6862 __ test(obj.reg(), Immediate(kSmiTagMask));
6863 destination()->false_target()->Branch(zero);
6864 __ cmp(obj.reg(), Factory::null_value());
6865 destination()->true_target()->Branch(equal);
6866
6867 Result map = allocator()->Allocate();
6868 ASSERT(map.is_valid());
6869 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
6870 // Undetectable objects behave like undefined when tested with typeof.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006871 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
6872 1 << Map::kIsUndetectable);
Steve Blockd0582a62009-12-15 09:54:21 +00006873 destination()->false_target()->Branch(not_zero);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006874 // Do a range test for JSObject type. We can't use
6875 // MacroAssembler::IsInstanceJSObjectType, because we are using a
6876 // ControlDestination, so we copy its implementation here.
Steve Blockd0582a62009-12-15 09:54:21 +00006877 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006878 __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
6879 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00006880 obj.Unuse();
6881 map.Unuse();
Leon Clarkef7060e22010-06-03 12:02:55 +01006882 destination()->Split(below_equal);
Steve Blockd0582a62009-12-15 09:54:21 +00006883}
6884
6885
Iain Merrick75681382010-08-19 15:07:18 +01006886void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01006887 // This generates a fast version of:
6888 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
6889 // typeof(arg) === 'function').
6890 // It includes undetectable objects (as opposed to IsObject).
6891 ASSERT(args->length() == 1);
6892 Load(args->at(0));
6893 Result value = frame_->Pop();
6894 value.ToRegister();
6895 ASSERT(value.is_valid());
6896 __ test(value.reg(), Immediate(kSmiTagMask));
6897 destination()->false_target()->Branch(equal);
6898
6899 // Check that this is an object.
6900 frame_->Spill(value.reg());
6901 __ CmpObjectType(value.reg(), FIRST_JS_OBJECT_TYPE, value.reg());
6902 value.Unuse();
6903 destination()->Split(above_equal);
6904}
6905
6906
Iain Merrick75681382010-08-19 15:07:18 +01006907// Deferred code to check whether a String wrapper object is safe to use
6908// its default valueOf behavior. This code is called after the bit caching
6909// this information in the map has been tested against the object's map,
6910// held in the map_result_ register. On return the register map_result_
6911// contains 1 for true and 0 for false.
6912class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
6913 public:
6914 DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
6915 Register map_result,
6916 Register scratch1,
6917 Register scratch2)
6918 : object_(object),
6919 map_result_(map_result),
6920 scratch1_(scratch1),
6921 scratch2_(scratch2) { }
6922
6923 virtual void Generate() {
6924 Label false_result;
6925
6926 // Check that map is loaded as expected.
6927 if (FLAG_debug_code) {
6928 __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
6929 __ Assert(equal, "Map not in expected register");
6930 }
6931
6932 // Check for fast case object. Generate false result for slow case object.
6933 __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
6934 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
6935 __ cmp(scratch1_, Factory::hash_table_map());
6936 __ j(equal, &false_result);
6937
6938 // Look for the valueOf symbol in the descriptor array, and indicate false
6939 // if found. The type is not checked, so a transition with that key also
6940 // counts, giving a harmless false negative.
6941 __ mov(map_result_,
6942 FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
6943 __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
6944 // map_result_: descriptor array
6945 // scratch1_: length of descriptor array
6946 // Calculate the end of the descriptor array.
6947 STATIC_ASSERT(kSmiTag == 0);
6948 STATIC_ASSERT(kSmiTagSize == 1);
6949 STATIC_ASSERT(kPointerSize == 4);
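  // scratch1_ holds the length as a smi (value << 1), so scaling it by a
  // further factor of two yields a byte offset of value * kPointerSize.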
6950 __ lea(scratch1_,
6951 Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
6952 // Calculate location of the first key name.
6953 __ add(Operand(map_result_),
6954 Immediate(FixedArray::kHeaderSize +
6955 DescriptorArray::kFirstIndex * kPointerSize));
6956 // Loop through all the keys in the descriptor array. If one of these is
6957 // the symbol valueOf, the result is false.
6958 Label entry, loop;
6959 __ jmp(&entry);
6960 __ bind(&loop);
6961 __ mov(scratch2_, FieldOperand(map_result_, 0));
6962 __ cmp(scratch2_, Factory::value_of_symbol());
6963 __ j(equal, &false_result);
6964 __ add(Operand(map_result_), Immediate(kPointerSize));
6965 __ bind(&entry);
6966 __ cmp(map_result_, Operand(scratch1_));
6967 __ j(not_equal, &loop);
6968
6969 // Reload map as register map_result_ was used as temporary above.
6970 __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
6971
6972 // If a valueOf property is not found on the object, check that its
6973 // prototype is the unmodified String prototype. If not, the result is false.
6974 __ mov(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
6975 __ test(scratch1_, Immediate(kSmiTagMask));
6976 __ j(zero, &false_result);
6977 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
6978 __ mov(scratch2_, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
6979 __ mov(scratch2_,
6980 FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
6981 __ cmp(scratch1_,
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006982 ContextOperand(scratch2_,
6983 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
Iain Merrick75681382010-08-19 15:07:18 +01006984 __ j(not_equal, &false_result);
6985 // Set the bit in the map to indicate that it has been checked safe for
6986 // default valueOf and set true result.
6987 __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
6988 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
6989 __ Set(map_result_, Immediate(1));
6990 __ jmp(exit_label());
6991 __ bind(&false_result);
6992 // Set false result.
6993 __ Set(map_result_, Immediate(0));
6994 }
6995
6996 private:
6997 Register object_;
6998 Register map_result_;
6999 Register scratch1_;
7000 Register scratch2_;
7001};
7002
7003
7004void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
7005 ZoneList<Expression*>* args) {
7006 ASSERT(args->length() == 1);
7007 Load(args->at(0));
7008 Result obj = frame_->Pop(); // Pop the string wrapper.
7009 obj.ToRegister();
7010 ASSERT(obj.is_valid());
7011 if (FLAG_debug_code) {
7012 __ AbortIfSmi(obj.reg());
7013 }
7014
7015 // Check whether this map has already been checked to be safe for default
7016 // valueOf.
7017 Result map_result = allocator()->Allocate();
7018 ASSERT(map_result.is_valid());
7019 __ mov(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
7020 __ test_b(FieldOperand(map_result.reg(), Map::kBitField2Offset),
7021 1 << Map::kStringWrapperSafeForDefaultValueOf);
7022 destination()->true_target()->Branch(not_zero);
7023
7024 // We need an additional two scratch registers for the deferred code.
7025 Result temp1 = allocator()->Allocate();
7026 ASSERT(temp1.is_valid());
7027 Result temp2 = allocator()->Allocate();
7028 ASSERT(temp2.is_valid());
7029
7030 DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
7031 new DeferredIsStringWrapperSafeForDefaultValueOf(
7032 obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
7033 deferred->Branch(zero);
7034 deferred->BindExit();
7035 __ test(map_result.reg(), Operand(map_result.reg()));
7036 obj.Unuse();
7037 map_result.Unuse();
7038 temp1.Unuse();
7039 temp2.Unuse();
7040 destination()->Split(not_equal);
7041}
7042
7043
Steve Blockd0582a62009-12-15 09:54:21 +00007044void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
7045 // This generates a fast version of:
7046 // (%_ClassOf(arg) === 'Function')
7047 ASSERT(args->length() == 1);
7048 Load(args->at(0));
7049 Result obj = frame_->Pop();
7050 obj.ToRegister();
7051 __ test(obj.reg(), Immediate(kSmiTagMask));
7052 destination()->false_target()->Branch(zero);
7053 Result temp = allocator()->Allocate();
7054 ASSERT(temp.is_valid());
7055 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
7056 obj.Unuse();
7057 temp.Unuse();
7058 destination()->Split(equal);
7059}
7060
7061
Leon Clarked91b9f72010-01-27 17:25:45 +00007062void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
7063 ASSERT(args->length() == 1);
7064 Load(args->at(0));
7065 Result obj = frame_->Pop();
7066 obj.ToRegister();
7067 __ test(obj.reg(), Immediate(kSmiTagMask));
7068 destination()->false_target()->Branch(zero);
7069 Result temp = allocator()->Allocate();
7070 ASSERT(temp.is_valid());
7071 __ mov(temp.reg(),
7072 FieldOperand(obj.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007073 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
7074 1 << Map::kIsUndetectable);
Leon Clarked91b9f72010-01-27 17:25:45 +00007075 obj.Unuse();
7076 temp.Unuse();
7077 destination()->Split(not_zero);
7078}
7079
7080
Steve Blocka7e24c12009-10-30 11:49:00 +00007081void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
7082 ASSERT(args->length() == 0);
7083
7084 // Get the frame pointer for the calling frame.
7085 Result fp = allocator()->Allocate();
7086 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
7087
7088 // Skip the arguments adaptor frame if it exists.
7089 Label check_frame_marker;
7090 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
7091 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
7092 __ j(not_equal, &check_frame_marker);
7093 __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
7094
7095 // Check the marker in the calling frame.
7096 __ bind(&check_frame_marker);
7097 __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
7098 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
7099 fp.Unuse();
7100 destination()->Split(equal);
7101}
7102
7103
7104void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
7105 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01007106
7107 Result fp = allocator_->Allocate();
7108 Result result = allocator_->Allocate();
7109 ASSERT(fp.is_valid() && result.is_valid());
7110
7111 Label exit;
7112
7113 // Get the number of formal parameters.
7114 __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));
7115
7116 // Check if the calling frame is an arguments adaptor frame.
7117 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
7118 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
7119 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
7120 __ j(not_equal, &exit);
7121
7122 // Arguments adaptor case: Read the arguments length from the
7123 // adaptor frame.
7124 __ mov(result.reg(),
7125 Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));
7126
7127 __ bind(&exit);
7128 result.set_type_info(TypeInfo::Smi());
7129 if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00007130 frame_->Push(&result);
7131}
7132
7133
7134void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
7135 ASSERT(args->length() == 1);
7136 JumpTarget leave, null, function, non_function_constructor;
7137 Load(args->at(0)); // Load the object.
7138 Result obj = frame_->Pop();
7139 obj.ToRegister();
7140 frame_->Spill(obj.reg());
7141
7142 // If the object is a smi, we return null.
7143 __ test(obj.reg(), Immediate(kSmiTagMask));
7144 null.Branch(zero);
7145
7146 // Check that the object is a JS object but take special care of JS
7147 // functions to make sure they have 'Function' as their class.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007148 __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
7149 null.Branch(below);
Steve Blocka7e24c12009-10-30 11:49:00 +00007150
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007151 // As long as JS_FUNCTION_TYPE is the last instance type and it is
7152 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
7153 // LAST_JS_OBJECT_TYPE.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007154 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
7155 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007156 __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
7157 function.Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00007158
7159 // Check if the constructor in the map is a function.
7160 { Result tmp = allocator()->Allocate();
7161 __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
7162 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
7163 non_function_constructor.Branch(not_equal);
7164 }
7165
7166 // The map register now contains the constructor function. Grab the
7167 // instance class name from there.
7168 __ mov(obj.reg(),
7169 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
7170 __ mov(obj.reg(),
7171 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
7172 frame_->Push(&obj);
7173 leave.Jump();
7174
7175 // Functions have class 'Function'.
7176 function.Bind();
7177 frame_->Push(Factory::function_class_symbol());
7178 leave.Jump();
7179
7180 // Objects with a non-function constructor have class 'Object'.
7181 non_function_constructor.Bind();
7182 frame_->Push(Factory::Object_symbol());
7183 leave.Jump();
7184
7185 // Non-JS objects have class null.
7186 null.Bind();
7187 frame_->Push(Factory::null_value());
7188
7189 // All done.
7190 leave.Bind();
7191}
7192
7193
7194void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
7195 ASSERT(args->length() == 1);
7196 JumpTarget leave;
7197 Load(args->at(0)); // Load the object.
7198 frame_->Dup();
7199 Result object = frame_->Pop();
7200 object.ToRegister();
7201 ASSERT(object.is_valid());
7202 // if (object->IsSmi()) return object.
7203 __ test(object.reg(), Immediate(kSmiTagMask));
7204 leave.Branch(zero, taken);
7205 // It is a heap object - get map.
7206 Result temp = allocator()->Allocate();
7207 ASSERT(temp.is_valid());
7208 // if (!object->IsJSValue()) return object.
7209 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
7210 leave.Branch(not_equal, not_taken);
7211 __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
7212 object.Unuse();
7213 frame_->SetElementAt(0, &temp);
7214 leave.Bind();
7215}
7216
7217
7218void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
7219 ASSERT(args->length() == 2);
7220 JumpTarget leave;
7221 Load(args->at(0)); // Load the object.
7222 Load(args->at(1)); // Load the value.
7223 Result value = frame_->Pop();
7224 Result object = frame_->Pop();
7225 value.ToRegister();
7226 object.ToRegister();
7227
7228 // if (object->IsSmi()) return value.
7229 __ test(object.reg(), Immediate(kSmiTagMask));
7230 leave.Branch(zero, &value, taken);
7231
7232 // It is a heap object - get its map.
7233 Result scratch = allocator_->Allocate();
7234 ASSERT(scratch.is_valid());
7235 // if (!object->IsJSValue()) return value.
7236 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
7237 leave.Branch(not_equal, &value, not_taken);
7238
7239 // Store the value.
7240 __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
7241 // Update the write barrier. Save the value as it will be
7242 // overwritten by the write barrier code and is needed afterward.
7243 Result duplicate_value = allocator_->Allocate();
7244 ASSERT(duplicate_value.is_valid());
7245 __ mov(duplicate_value.reg(), value.reg());
7246 // The object register is also overwritten by the write barrier and
7247 // possibly aliased in the frame.
7248 frame_->Spill(object.reg());
7249 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
7250 scratch.reg());
7251 object.Unuse();
7252 scratch.Unuse();
7253 duplicate_value.Unuse();
7254
7255 // Leave.
7256 leave.Bind(&value);
7257 frame_->Push(&value);
7258}
7259
7260
Steve Block6ded16b2010-05-10 14:33:55 +01007261void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007262 ASSERT(args->length() == 1);
7263
7264 // ArgumentsAccessStub expects the key in edx and the formal
7265 // parameter count in eax.
7266 Load(args->at(0));
7267 Result key = frame_->Pop();
7268 // Explicitly create a constant result.
Andrei Popescu31002712010-02-23 13:46:05 +00007269 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00007270 // Call the shared stub to get to arguments[key].
7271 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
7272 Result result = frame_->CallStub(&stub, &key, &count);
7273 frame_->Push(&result);
7274}
7275
7276
7277void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
7278 ASSERT(args->length() == 2);
7279
7280 // Load the two objects into registers and perform the comparison.
7281 Load(args->at(0));
7282 Load(args->at(1));
7283 Result right = frame_->Pop();
7284 Result left = frame_->Pop();
7285 right.ToRegister();
7286 left.ToRegister();
7287 __ cmp(right.reg(), Operand(left.reg()));
7288 right.Unuse();
7289 left.Unuse();
7290 destination()->Split(equal);
7291}
7292
7293
7294void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
7295 ASSERT(args->length() == 0);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007296 STATIC_ASSERT(kSmiTag == 0); // EBP value is aligned, so it looks like a Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00007297 Result ebp_as_smi = allocator_->Allocate();
7298 ASSERT(ebp_as_smi.is_valid());
7299 __ mov(ebp_as_smi.reg(), Operand(ebp));
7300 frame_->Push(&ebp_as_smi);
7301}
7302
7303
Steve Block6ded16b2010-05-10 14:33:55 +01007304void CodeGenerator::GenerateRandomHeapNumber(
7305 ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007306 ASSERT(args->length() == 0);
7307 frame_->SpillAll();
7308
Steve Block6ded16b2010-05-10 14:33:55 +01007309 Label slow_allocate_heapnumber;
7310 Label heapnumber_allocated;
Steve Blocka7e24c12009-10-30 11:49:00 +00007311
Steve Block6ded16b2010-05-10 14:33:55 +01007312 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
7313 __ jmp(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00007314
Steve Block6ded16b2010-05-10 14:33:55 +01007315 __ bind(&slow_allocate_heapnumber);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01007316 // Allocate a heap number.
7317 __ CallRuntime(Runtime::kNumberAlloc, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01007318 __ mov(edi, eax);
7319
7320 __ bind(&heapnumber_allocated);
7321
7322 __ PrepareCallCFunction(0, ebx);
7323 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
7324
7325 // Convert 32 random bits in eax to 0.(32 random bits) in a double
7326 // by computing:
7327 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
7328 // This is implemented on both SSE2 and FPU.
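  // The high word encodes the exponent of 2^20 and the low word holds the
  // 32 random bits r, so the double equals (1 + r * 2^-52) * 2^20;
  // subtracting 1.0 * 2^20 leaves r * 2^-32, uniform in [0, 1).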
7329 if (CpuFeatures::IsSupported(SSE2)) {
7330 CpuFeatures::Scope fscope(SSE2);
7331 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
7332 __ movd(xmm1, Operand(ebx));
7333 __ movd(xmm0, Operand(eax));
7334 __ cvtss2sd(xmm1, xmm1);
7335 __ pxor(xmm0, xmm1);
7336 __ subsd(xmm0, xmm1);
7337 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
7338 } else {
7339 // 0x4130000000000000 is 1.0 x 2^20 as a double.
7340 __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
7341 Immediate(0x41300000));
7342 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
7343 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
7344 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
7345 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
7346 __ fsubp(1);
7347 __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00007348 }
Steve Block6ded16b2010-05-10 14:33:55 +01007349 __ mov(eax, edi);
Steve Blocka7e24c12009-10-30 11:49:00 +00007350
7351 Result result = allocator_->Allocate(eax);
7352 frame_->Push(&result);
7353}
7354
7355
Steve Blockd0582a62009-12-15 09:54:21 +00007356void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
7357 ASSERT_EQ(2, args->length());
7358
7359 Load(args->at(0));
7360 Load(args->at(1));
7361
7362 StringAddStub stub(NO_STRING_ADD_FLAGS);
7363 Result answer = frame_->CallStub(&stub, 2);
7364 frame_->Push(&answer);
7365}
7366
7367
Leon Clarkee46be812010-01-19 14:06:41 +00007368void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
7369 ASSERT_EQ(3, args->length());
7370
7371 Load(args->at(0));
7372 Load(args->at(1));
7373 Load(args->at(2));
7374
7375 SubStringStub stub;
7376 Result answer = frame_->CallStub(&stub, 3);
7377 frame_->Push(&answer);
7378}
7379
7380
7381void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
7382 ASSERT_EQ(2, args->length());
7383
7384 Load(args->at(0));
7385 Load(args->at(1));
7386
7387 StringCompareStub stub;
7388 Result answer = frame_->CallStub(&stub, 2);
7389 frame_->Push(&answer);
7390}
7391
7392
7393void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01007394 ASSERT_EQ(4, args->length());
Leon Clarkee46be812010-01-19 14:06:41 +00007395
7396 // Load the arguments on the stack and call the stub.
7397 Load(args->at(0));
7398 Load(args->at(1));
7399 Load(args->at(2));
7400 Load(args->at(3));
Ben Murdochb0fe1622011-05-05 13:52:32 +01007401
Leon Clarkee46be812010-01-19 14:06:41 +00007402 RegExpExecStub stub;
7403 Result result = frame_->CallStub(&stub, 4);
7404 frame_->Push(&result);
7405}
7406
7407
Steve Block6ded16b2010-05-10 14:33:55 +01007408void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01007409 ASSERT_EQ(3, args->length());
Ben Murdochb0fe1622011-05-05 13:52:32 +01007410
Steve Block6ded16b2010-05-10 14:33:55 +01007411 Load(args->at(0)); // Size of array, smi.
7412 Load(args->at(1)); // "index" property value.
7413 Load(args->at(2)); // "input" property value.
Steve Block6ded16b2010-05-10 14:33:55 +01007414
Ben Murdochb0fe1622011-05-05 13:52:32 +01007415 RegExpConstructResultStub stub;
7416 Result result = frame_->CallStub(&stub, 3);
7417 frame_->Push(&result);
Steve Block6ded16b2010-05-10 14:33:55 +01007418}
7419
7420
7421class DeferredSearchCache: public DeferredCode {
7422 public:
7423 DeferredSearchCache(Register dst, Register cache, Register key)
7424 : dst_(dst), cache_(cache), key_(key) {
7425 set_comment("[ DeferredSearchCache");
7426 }
7427
7428 virtual void Generate();
7429
7430 private:
Kristian Monsen25f61362010-05-21 11:50:48 +01007431 Register dst_; // On invocation, smi index of the finger; on exit,
7432 // the value that was looked up.
7433 Register cache_; // Instance of JSFunctionResultCache.
7434 Register key_; // The key being looked up.
Steve Block6ded16b2010-05-10 14:33:55 +01007435};
7436
7437
7438void DeferredSearchCache::Generate() {
Kristian Monsen25f61362010-05-21 11:50:48 +01007439 Label first_loop, search_further, second_loop, cache_miss;
7440
7441 // Smi-tagging is equivalent to multiplying by 2.
7442 STATIC_ASSERT(kSmiTag == 0);
7443 STATIC_ASSERT(kSmiTagSize == 1);
7444
7445 Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
7446 Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);
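  // Each cache entry is a key/value pair occupying kEntrySize consecutive
  // elements, so a key found at index dst_ has its value at dst_ + 1.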
7447
7448 // Check the cache from finger to start of the cache.
7449 __ bind(&first_loop);
7450 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
7451 __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
7452 __ j(less, &search_further);
7453
7454 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
7455 __ j(not_equal, &first_loop);
7456
7457 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
7458 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
7459 __ jmp(exit_label());
7460
7461 __ bind(&search_further);
7462
7463 // Check the cache from end of cache up to finger.
7464 __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));
7465
7466 __ bind(&second_loop);
7467 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
7468 // Consider prefetching into some reg.
7469 __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
7470 __ j(less_equal, &cache_miss);
7471
7472 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
7473 __ j(not_equal, &second_loop);
7474
7475 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
7476 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
7477 __ jmp(exit_label());
7478
7479 __ bind(&cache_miss);
7480 __ push(cache_); // store a reference to cache
7481 __ push(key_); // store a key
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007482 __ push(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
Steve Block6ded16b2010-05-10 14:33:55 +01007483 __ push(key_);
Kristian Monsen25f61362010-05-21 11:50:48 +01007484 // On ia32 function must be in edi.
7485 __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
7486 ParameterCount expected(1);
7487 __ InvokeFunction(edi, expected, CALL_FUNCTION);
7488
7489 // Find a place to put new cached value into.
7490 Label add_new_entry, update_cache;
7491 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache
7492 // Possible optimization: cache size is constant for the given cache
7493 // so technically we could use a constant here. However, if we have
7494 // cache miss this optimization would hardly matter much.
7495
7496 // Check if we could add new entry to cache.
7497 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
Kristian Monsen25f61362010-05-21 11:50:48 +01007498 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
7499 __ j(greater, &add_new_entry);
7500
7501 // Check if we could evict entry after finger.
7502 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
7503 __ add(Operand(edx), Immediate(kEntrySizeSmi));
7504 __ cmp(ebx, Operand(edx));
7505 __ j(greater, &update_cache);
7506
7507 // Need to wrap over the cache.
7508 __ mov(edx, Immediate(kEntriesIndexSmi));
7509 __ jmp(&update_cache);
7510
7511 __ bind(&add_new_entry);
7512 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
7513 __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
7514 __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);
7515
7516 // Update the cache itself.
7517 // edx holds the index.
7518 __ bind(&update_cache);
7519 __ pop(ebx); // restore the key
7520 __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
7521 // Store key.
7522 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
7523 __ RecordWrite(ecx, 0, ebx, edx);
7524
7525 // Store value.
7526 __ pop(ecx); // restore the cache.
7527 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
7528 __ add(Operand(edx), Immediate(Smi::FromInt(1)));
7529 __ mov(ebx, eax);
7530 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
7531 __ RecordWrite(ecx, 0, ebx, edx);
7532
Steve Block6ded16b2010-05-10 14:33:55 +01007533 if (!dst_.is(eax)) {
7534 __ mov(dst_, eax);
7535 }
7536}
7537
7538
7539void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
7540 ASSERT_EQ(2, args->length());
7541
7542 ASSERT_NE(NULL, args->at(0)->AsLiteral());
7543 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
7544
7545 Handle<FixedArray> jsfunction_result_caches(
7546 Top::global_context()->jsfunction_result_caches());
7547 if (jsfunction_result_caches->length() <= cache_id) {
7548 __ Abort("Attempt to use undefined cache.");
7549 frame_->Push(Factory::undefined_value());
7550 return;
7551 }
7552
7553 Load(args->at(1));
7554 Result key = frame_->Pop();
7555 key.ToRegister();
7556
7557 Result cache = allocator()->Allocate();
7558 ASSERT(cache.is_valid());
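  // Load the JSFunctionResultCache for cache_id from the global context.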
7559 __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
7560 __ mov(cache.reg(),
7561 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
7562 __ mov(cache.reg(),
7563 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
7564 __ mov(cache.reg(),
7565 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));
7566
7567 Result tmp = allocator()->Allocate();
7568 ASSERT(tmp.is_valid());
7569
7570 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
7571 cache.reg(),
7572 key.reg());
7573
Steve Block6ded16b2010-05-10 14:33:55 +01007574 // tmp.reg() now holds finger offset as a smi.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007575 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01007576 __ mov(tmp.reg(), FieldOperand(cache.reg(),
7577 JSFunctionResultCache::kFingerOffset));
7578 __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01007579 deferred->Branch(not_equal);
7580
Kristian Monsen25f61362010-05-21 11:50:48 +01007581 __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));
Steve Block6ded16b2010-05-10 14:33:55 +01007582
7583 deferred->BindExit();
7584 frame_->Push(&tmp);
7585}
7586
7587
Andrei Popescu402d9372010-02-26 13:31:12 +00007588void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
7589 ASSERT_EQ(args->length(), 1);
7590
7591 // Load the argument on the stack and call the stub.
7592 Load(args->at(0));
7593 NumberToStringStub stub;
7594 Result result = frame_->CallStub(&stub, 1);
7595 frame_->Push(&result);
7596}
7597
7598
Steve Block6ded16b2010-05-10 14:33:55 +01007599class DeferredSwapElements: public DeferredCode {
7600 public:
7601 DeferredSwapElements(Register object, Register index1, Register index2)
7602 : object_(object), index1_(index1), index2_(index2) {
7603 set_comment("[ DeferredSwapElements");
7604 }
7605
7606 virtual void Generate();
7607
7608 private:
7609 Register object_, index1_, index2_;
7610};
7611
7612
7613void DeferredSwapElements::Generate() {
7614 __ push(object_);
7615 __ push(index1_);
7616 __ push(index2_);
7617 __ CallRuntime(Runtime::kSwapElements, 3);
7618}
7619
7620
7621void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
7622 // Note: this code assumes that the indices passed are within the
7623 // elements' bounds and refer to valid (non-hole) values.
7624 Comment cmnt(masm_, "[ GenerateSwapElements");
7625
7626 ASSERT_EQ(3, args->length());
7627
7628 Load(args->at(0));
7629 Load(args->at(1));
7630 Load(args->at(2));
7631
7632 Result index2 = frame_->Pop();
7633 index2.ToRegister();
7634
7635 Result index1 = frame_->Pop();
7636 index1.ToRegister();
7637
7638 Result object = frame_->Pop();
7639 object.ToRegister();
7640
7641 Result tmp1 = allocator()->Allocate();
7642 tmp1.ToRegister();
7643 Result tmp2 = allocator()->Allocate();
7644 tmp2.ToRegister();
7645
7646 frame_->Spill(object.reg());
7647 frame_->Spill(index1.reg());
7648 frame_->Spill(index2.reg());
7649
7650 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
7651 index1.reg(),
7652 index2.reg());
7653
7654 // Fetch the map and check if array is in fast case.
7655 // Check that object doesn't require security checks and
7656 // has no indexed interceptor.
7657 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
Leon Clarkef7060e22010-06-03 12:02:55 +01007658 deferred->Branch(below);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007659 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
7660 KeyedLoadIC::kSlowCaseBitFieldMask);
Steve Block6ded16b2010-05-10 14:33:55 +01007661 deferred->Branch(not_zero);
7662
Iain Merrick75681382010-08-19 15:07:18 +01007663 // Check the object's elements are in fast case and writable.
Steve Block6ded16b2010-05-10 14:33:55 +01007664 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
7665 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
7666 Immediate(Factory::fixed_array_map()));
7667 deferred->Branch(not_equal);
7668
7669 // Smi-tagging is equivalent to multiplying by 2.
7670 STATIC_ASSERT(kSmiTag == 0);
7671 STATIC_ASSERT(kSmiTagSize == 1);
7672
7673 // Check that both indices are smis.
7674 __ mov(tmp2.reg(), index1.reg());
7675 __ or_(tmp2.reg(), Operand(index2.reg()));
7676 __ test(tmp2.reg(), Immediate(kSmiTagMask));
7677 deferred->Branch(not_zero);
7678
Ben Murdochdb5a90a2011-01-06 18:27:03 +00007679 // Check that both indices are valid.
7680 __ mov(tmp2.reg(), FieldOperand(object.reg(), JSArray::kLengthOffset));
7681 __ cmp(tmp2.reg(), Operand(index1.reg()));
7682 deferred->Branch(below_equal);
7683 __ cmp(tmp2.reg(), Operand(index2.reg()));
7684 deferred->Branch(below_equal);
7685
Steve Block6ded16b2010-05-10 14:33:55 +01007686 // Bring addresses into index1 and index2.
Kristian Monsen25f61362010-05-21 11:50:48 +01007687 __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
7688 __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01007689
7690 // Swap elements.
7691 __ mov(object.reg(), Operand(index1.reg(), 0));
7692 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
7693 __ mov(Operand(index2.reg(), 0), object.reg());
7694 __ mov(Operand(index1.reg(), 0), tmp2.reg());
7695
7696 Label done;
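  // Stores into a new-space object never need remembered-set entries, so
  // the write barrier can be skipped entirely in that case.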
7697 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
7698 // Possible optimization: do a check that both values are Smis
7699 // (or them and test against Smi mask.)
7700
7701 __ mov(tmp2.reg(), tmp1.reg());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007702 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
7703 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01007704 __ bind(&done);
7705
7706 deferred->BindExit();
7707 frame_->Push(Factory::undefined_value());
7708}
7709
7710
7711void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
7712 Comment cmnt(masm_, "[ GenerateCallFunction");
7713
7714 ASSERT(args->length() >= 2);
7715
7716 int n_args = args->length() - 2; // for receiver and function.
7717 Load(args->at(0)); // receiver
7718 for (int i = 0; i < n_args; i++) {
7719 Load(args->at(i + 1));
7720 }
7721 Load(args->at(n_args + 1)); // function
7722 Result result = frame_->CallJSFunction(n_args);
7723 frame_->Push(&result);
7724}
7725
7726
7727// Generates the Math.pow method. Only handles special cases and
7728// branches to the runtime system for everything else. Please note
7729// that this function assumes that the callsite has executed ToNumber
7730// on both arguments.
7731void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
7732 ASSERT(args->length() == 2);
7733 Load(args->at(0));
7734 Load(args->at(1));
7735 if (!CpuFeatures::IsSupported(SSE2)) {
7736 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
7737 frame_->Push(&res);
7738 } else {
7739 CpuFeatures::Scope use_sse2(SSE2);
7740 Label allocate_return;
7741 // Load the two operands while leaving the values on the frame.
7742 frame()->Dup();
7743 Result exponent = frame()->Pop();
7744 exponent.ToRegister();
7745 frame()->Spill(exponent.reg());
7746 frame()->PushElementAt(1);
7747 Result base = frame()->Pop();
7748 base.ToRegister();
7749 frame()->Spill(base.reg());
7750
7751 Result answer = allocator()->Allocate();
7752 ASSERT(answer.is_valid());
7753 ASSERT(!exponent.reg().is(base.reg()));
7754 JumpTarget call_runtime;
7755
7756 // Save 1 in xmm3 - we need this several times later on.
7757 __ mov(answer.reg(), Immediate(1));
7758 __ cvtsi2sd(xmm3, Operand(answer.reg()));
7759
7760 Label exponent_nonsmi;
7761 Label base_nonsmi;
7762 // If the exponent is a heap number go to that specific case.
7763 __ test(exponent.reg(), Immediate(kSmiTagMask));
7764 __ j(not_zero, &exponent_nonsmi);
7765 __ test(base.reg(), Immediate(kSmiTagMask));
7766 __ j(not_zero, &base_nonsmi);
7767
7768 // Optimized version when y is an integer.
7769 Label powi;
7770 __ SmiUntag(base.reg());
7771 __ cvtsi2sd(xmm0, Operand(base.reg()));
7772 __ jmp(&powi);
7773 // The exponent is a smi and the base is a heap number.
7774 __ bind(&base_nonsmi);
7775 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7776 Factory::heap_number_map());
7777 call_runtime.Branch(not_equal);
7778
7779 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7780
7781 // Optimized version of pow if y is an integer.
7782 __ bind(&powi);
7783 __ SmiUntag(exponent.reg());
7784
7785 // Save exponent in base as we need to check if exponent is negative later.
7786 // We know that base and exponent are in different registers.
7787 __ mov(base.reg(), exponent.reg());
7788
7789 // Get absolute value of exponent.
7790 Label no_neg;
7791 __ cmp(exponent.reg(), 0);
7792 __ j(greater_equal, &no_neg);
7793 __ neg(exponent.reg());
7794 __ bind(&no_neg);
7795
7796 // Load xmm1 with 1.
7797 __ movsd(xmm1, xmm3);
7798 Label while_true;
7799 Label no_multiply;
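  // Exponentiation by squaring: shift the exponent right one bit at a
  // time; whenever the shifted-out bit (now in the carry flag) is set,
  // multiply the accumulator xmm1 by xmm0, which holds successive squares
  // of the base.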
7800
7801 __ bind(&while_true);
7802 __ shr(exponent.reg(), 1);
7803 __ j(not_carry, &no_multiply);
7804 __ mulsd(xmm1, xmm0);
7805 __ bind(&no_multiply);
7806 __ test(exponent.reg(), Operand(exponent.reg()));
7807 __ mulsd(xmm0, xmm0);
7808 __ j(not_zero, &while_true);
7809
7810 // base.reg() saved the original exponent; if it was negative, return 1/result.
7811 __ test(base.reg(), Operand(base.reg()));
7812 __ j(positive, &allocate_return);
7813 // Special case if xmm1 has reached infinity.
7814 __ mov(answer.reg(), Immediate(0x7FB00000));
7815 __ movd(xmm0, Operand(answer.reg()));
7816 __ cvtss2sd(xmm0, xmm0);
7817 __ ucomisd(xmm0, xmm1);
7818 call_runtime.Branch(equal);
7819 __ divsd(xmm3, xmm1);
7820 __ movsd(xmm1, xmm3);
7821 __ jmp(&allocate_return);
7822
7823 // The exponent (or both operands) is a heap number, so from here on we
7824 // work on doubles.
7825 __ bind(&exponent_nonsmi);
7826 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
7827 Factory::heap_number_map());
7828 call_runtime.Branch(not_equal);
7829 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
7830 // Test if exponent is nan.
7831 __ ucomisd(xmm1, xmm1);
7832 call_runtime.Branch(parity_even);
7833
7834 Label base_not_smi;
7835 Label handle_special_cases;
7836 __ test(base.reg(), Immediate(kSmiTagMask));
7837 __ j(not_zero, &base_not_smi);
7838 __ SmiUntag(base.reg());
7839 __ cvtsi2sd(xmm0, Operand(base.reg()));
7840 __ jmp(&handle_special_cases);
7841 __ bind(&base_not_smi);
7842 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7843 Factory::heap_number_map());
7844 call_runtime.Branch(not_equal);
7845 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
7846 __ and_(answer.reg(), HeapNumber::kExponentMask);
7847 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
7848 // base is NaN or +/-Infinity
7849 call_runtime.Branch(greater_equal);
7850 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7851
7852 // base is in xmm0 and exponent is in xmm1.
7853 __ bind(&handle_special_cases);
7854 Label not_minus_half;
7855 // Test for -0.5.
7856 // Load xmm2 with -0.5.
7857 __ mov(answer.reg(), Immediate(0xBF000000));
7858 __ movd(xmm2, Operand(answer.reg()));
7859 __ cvtss2sd(xmm2, xmm2);
7860 // xmm2 now has -0.5.
7861 __ ucomisd(xmm2, xmm1);
7862 __ j(not_equal, &not_minus_half);
7863
7864 // Calculates reciprocal of square root.
7865 // Note that 1/sqrt(x) = sqrt(1/x))
7866 __ divsd(xmm3, xmm0);
7867 __ movsd(xmm1, xmm3);
7868 __ sqrtsd(xmm1, xmm1);
7869 __ jmp(&allocate_return);
7870
7871 // Test for 0.5.
7872 __ bind(&not_minus_half);
7873 // Load xmm2 with 0.5.
7874 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
7875 __ addsd(xmm2, xmm3);
7876 // xmm2 now has 0.5.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007877 __ ucomisd(xmm2, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +01007878 call_runtime.Branch(not_equal);
7879 // Calculates square root.
7880 __ movsd(xmm1, xmm0);
7881 __ sqrtsd(xmm1, xmm1);
7882
7883 JumpTarget done;
7884 Label failure, success;
7885 __ bind(&allocate_return);
7886 // Make a copy of the frame to enable us to handle allocation
7887 // failure after the JumpTarget jump.
7888 VirtualFrame* clone = new VirtualFrame(frame());
7889 __ AllocateHeapNumber(answer.reg(), exponent.reg(),
7890 base.reg(), &failure);
7891 __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
7892 // Remove the two original values from the frame - we only need those
7893 // in the case where we branch to runtime.
7894 frame()->Drop(2);
7895 exponent.Unuse();
7896 base.Unuse();
7897 done.Jump(&answer);
7898 // Use the copy of the original frame as our current frame.
7899 RegisterFile empty_regs;
7900 SetFrame(clone, &empty_regs);
7901 // If we experience an allocation failure we branch to runtime.
7902 __ bind(&failure);
7903 call_runtime.Bind();
7904 answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);
7905
7906 done.Bind(&answer);
7907 frame()->Push(&answer);
7908 }
7909}
7910
7911
Andrei Popescu402d9372010-02-26 13:31:12 +00007912void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
7913 ASSERT_EQ(args->length(), 1);
7914 Load(args->at(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01007915 TranscendentalCacheStub stub(TranscendentalCache::SIN,
7916 TranscendentalCacheStub::TAGGED);
Andrei Popescu402d9372010-02-26 13:31:12 +00007917 Result result = frame_->CallStub(&stub, 1);
7918 frame_->Push(&result);
7919}
7920
7921
7922void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
7923 ASSERT_EQ(args->length(), 1);
7924 Load(args->at(0));
Ben Murdochb0fe1622011-05-05 13:52:32 +01007925 TranscendentalCacheStub stub(TranscendentalCache::COS,
7926 TranscendentalCacheStub::TAGGED);
7927 Result result = frame_->CallStub(&stub, 1);
7928 frame_->Push(&result);
7929}
7930
7931
7932void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
7933 ASSERT_EQ(args->length(), 1);
7934 Load(args->at(0));
7935 TranscendentalCacheStub stub(TranscendentalCache::LOG,
7936 TranscendentalCacheStub::TAGGED);
Andrei Popescu402d9372010-02-26 13:31:12 +00007937 Result result = frame_->CallStub(&stub, 1);
7938 frame_->Push(&result);
7939}
7940
7941
Steve Block6ded16b2010-05-10 14:33:55 +01007942// Generates the Math.sqrt method. Please note - this function assumes that
7943// the callsite has executed ToNumber on the argument.
7944void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
7945 ASSERT_EQ(args->length(), 1);
7946 Load(args->at(0));
7947
7948 if (!CpuFeatures::IsSupported(SSE2)) {
7949 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
7950 frame()->Push(&result);
7951 } else {
7952 CpuFeatures::Scope use_sse2(SSE2);
7953 // Leave original value on the frame if we need to call runtime.
7954 frame()->Dup();
7955 Result result = frame()->Pop();
7956 result.ToRegister();
7957 frame()->Spill(result.reg());
7958 Label runtime;
7959 Label non_smi;
7960 Label load_done;
7961 JumpTarget end;
7962
7963 __ test(result.reg(), Immediate(kSmiTagMask));
7964 __ j(not_zero, &non_smi);
7965 __ SmiUntag(result.reg());
7966 __ cvtsi2sd(xmm0, Operand(result.reg()));
7967 __ jmp(&load_done);
7968 __ bind(&non_smi);
7969 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
7970 Factory::heap_number_map());
7971 __ j(not_equal, &runtime);
7972 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
7973
7974 __ bind(&load_done);
7975 __ sqrtsd(xmm0, xmm0);
7976 // A copy of the virtual frame to allow us to go to runtime after the
7977 // JumpTarget jump.
7978 Result scratch = allocator()->Allocate();
7979 VirtualFrame* clone = new VirtualFrame(frame());
7980 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);
7981
7982 __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
7983 frame()->Drop(1);
7984 scratch.Unuse();
7985 end.Jump(&result);
7986 // We only branch to runtime if we have an allocation error.
7987 // Use the copy of the original frame as our current frame.
7988 RegisterFile empty_regs;
7989 SetFrame(clone, &empty_regs);
7990 __ bind(&runtime);
7991 result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
7992
7993 end.Bind(&result);
7994 frame()->Push(&result);
7995 }
7996}
7997
7998
Ben Murdochbb769b22010-08-11 14:56:33 +01007999void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
8000 ASSERT_EQ(2, args->length());
8001 Load(args->at(0));
8002 Load(args->at(1));
8003 Result right_res = frame_->Pop();
8004 Result left_res = frame_->Pop();
8005 right_res.ToRegister();
8006 left_res.ToRegister();
8007 Result tmp_res = allocator()->Allocate();
8008 ASSERT(tmp_res.is_valid());
8009 Register right = right_res.reg();
8010 Register left = left_res.reg();
8011 Register tmp = tmp_res.reg();
8012 right_res.Unuse();
8013 left_res.Unuse();
8014 tmp_res.Unuse();
8015 __ cmp(left, Operand(right));
8016 destination()->true_target()->Branch(equal);
8017 // Fail if either is a non-HeapObject.
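  // Since the smi tag is zero, the AND of the two values has its low bit
  // set only if both are heap objects; a clear bit means at least one smi.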
8018 __ mov(tmp, left);
8019 __ and_(Operand(tmp), right);
8020 __ test(Operand(tmp), Immediate(kSmiTagMask));
8021 destination()->false_target()->Branch(equal);
8022 __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
8023 destination()->false_target()->Branch(not_equal);
8024 __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
8025 destination()->false_target()->Branch(not_equal);
8026 __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
8027 __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
8028 destination()->Split(equal);
8029}
8030
8031
Kristian Monsen80d68ea2010-09-08 11:05:35 +01008032void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
8033 ASSERT(args->length() == 1);
8034 Load(args->at(0));
8035 Result value = frame_->Pop();
8036 value.ToRegister();
8037 ASSERT(value.is_valid());
8038 if (FLAG_debug_code) {
8039 __ AbortIfNotString(value.reg());
8040 }
8041
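  // The bits covered by kContainsCachedArrayIndexMask are all clear
  // exactly when the hash field holds a cached array index, hence the
  // split on zero below.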
8042 __ test(FieldOperand(value.reg(), String::kHashFieldOffset),
8043 Immediate(String::kContainsCachedArrayIndexMask));
8044
8045 value.Unuse();
8046 destination()->Split(zero);
8047}
8048
8049
8050void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
8051 ASSERT(args->length() == 1);
8052 Load(args->at(0));
8053 Result string = frame_->Pop();
8054 string.ToRegister();
8055 if (FLAG_debug_code) {
8056 __ AbortIfNotString(string.reg());
8057 }
8058
8059 Result number = allocator()->Allocate();
8060 ASSERT(number.is_valid());
8061 __ mov(number.reg(), FieldOperand(string.reg(), String::kHashFieldOffset));
8062 __ IndexFromHash(number.reg(), number.reg());
8063 string.Unuse();
8064 frame_->Push(&number);
8065}
8066
8067
Steve Blocka7e24c12009-10-30 11:49:00 +00008068void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008069 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008070 if (CheckForInlineRuntimeCall(node)) {
8071 return;
8072 }
8073
8074 ZoneList<Expression*>* args = node->arguments();
8075 Comment cmnt(masm_, "[ CallRuntime");
8076 Runtime::Function* function = node->function();
8077
8078 if (function == NULL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008079 // Push the builtins object found in the current global object.
8080 Result temp = allocator()->Allocate();
8081 ASSERT(temp.is_valid());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08008082 __ mov(temp.reg(), GlobalObjectOperand());
Steve Blocka7e24c12009-10-30 11:49:00 +00008083 __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
8084 frame_->Push(&temp);
8085 }
8086
8087 // Push the arguments ("left-to-right").
8088 int arg_count = args->length();
8089 for (int i = 0; i < arg_count; i++) {
8090 Load(args->at(i));
8091 }
8092
8093 if (function == NULL) {
8094 // Call the JS runtime function.
Leon Clarkee46be812010-01-19 14:06:41 +00008095 frame_->Push(node->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00008096 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
8097 arg_count,
8098 loop_nesting_);
8099 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00008100 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00008101 } else {
8102 // Call the C runtime function.
8103 Result answer = frame_->CallRuntime(function, arg_count);
8104 frame_->Push(&answer);
8105 }
8106}
8107
8108
8109void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008110 Comment cmnt(masm_, "[ UnaryOperation");
8111
8112 Token::Value op = node->op();
8113
8114 if (op == Token::NOT) {
8115 // Swap the true and false targets but keep the same actual label
8116 // as the fall through.
8117 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00008118 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00008119 // Swap the labels back.
8120 destination()->Invert();
8121
8122 } else if (op == Token::DELETE) {
8123 Property* property = node->expression()->AsProperty();
8124 if (property != NULL) {
8125 Load(property->obj());
8126 Load(property->key());
8127 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
8128 frame_->Push(&answer);
8129 return;
8130 }
8131
8132 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
8133 if (variable != NULL) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01008134 Slot* slot = variable->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00008135 if (variable->is_global()) {
8136 LoadGlobal();
8137 frame_->Push(variable->name());
8138 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
8139 CALL_FUNCTION, 2);
8140 frame_->Push(&answer);
8141 return;
8142
8143 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
8144 // Call the runtime to look up the context holding the named
8145 // variable. Sync the virtual frame eagerly so we can push the
8146 // arguments directly into place.
8147 frame_->SyncRange(0, frame_->element_count() - 1);
8148 frame_->EmitPush(esi);
8149 frame_->EmitPush(Immediate(variable->name()));
8150 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
8151 ASSERT(context.is_register());
8152 frame_->EmitPush(context.reg());
8153 context.Unuse();
8154 frame_->EmitPush(Immediate(variable->name()));
8155 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
8156 CALL_FUNCTION, 2);
8157 frame_->Push(&answer);
8158 return;
8159 }
8160
8161      // Default: deleting a non-global variable that was not
8162      // dynamically introduced yields false.
8163 frame_->Push(Factory::false_value());
8164
8165 } else {
8166      // Default: deleting any other expression yields true.
8167      Load(node->expression());  // May have side effects.
8168 frame_->SetElementAt(0, Factory::true_value());
8169 }
8170
8171 } else if (op == Token::TYPEOF) {
8172 // Special case for loading the typeof expression; see comment on
8173 // LoadTypeofExpression().
8174 LoadTypeofExpression(node->expression());
8175 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
8176 frame_->Push(&answer);
8177
8178 } else if (op == Token::VOID) {
8179 Expression* expression = node->expression();
8180 if (expression && expression->AsLiteral() && (
8181 expression->AsLiteral()->IsTrue() ||
8182 expression->AsLiteral()->IsFalse() ||
8183 expression->AsLiteral()->handle()->IsNumber() ||
8184 expression->AsLiteral()->handle()->IsString() ||
8185 expression->AsLiteral()->handle()->IsJSRegExp() ||
8186 expression->AsLiteral()->IsNull())) {
8187 // Omit evaluating the value of the primitive literal.
8188 // It will be discarded anyway, and can have no side effect.
8189 frame_->Push(Factory::undefined_value());
8190 } else {
8191 Load(node->expression());
8192 frame_->SetElementAt(0, Factory::undefined_value());
8193 }
8194
8195 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008196 if (in_safe_int32_mode()) {
8197 Visit(node->expression());
8198 Result value = frame_->Pop();
8199 ASSERT(value.is_untagged_int32());
8200 // Registers containing an int32 value are not multiply used.
8201 ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
8202 value.ToRegister();
8203 switch (op) {
8204 case Token::SUB: {
8205 __ neg(value.reg());
8206 if (node->no_negative_zero()) {
8207 // -MIN_INT is MIN_INT with the overflow flag set.
8208 unsafe_bailout_->Branch(overflow);
8209 } else {
8210            // MIN_INT and 0 are the only inputs with bad negations
                // (overflow and -0); both have all 31 low bits clear, so one
                // test on the negated value catches both.
8211 __ test(value.reg(), Immediate(0x7FFFFFFF));
8212 unsafe_bailout_->Branch(zero);
8213 }
8214 break;
8215 }
8216 case Token::BIT_NOT: {
8217 __ not_(value.reg());
8218 break;
8219 }
8220 case Token::ADD: {
8221 // Unary plus has no effect on int32 values.
8222 break;
8223 }
8224 default:
8225 UNREACHABLE();
8226 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008227 }
Ben Murdochb0fe1622011-05-05 13:52:32 +01008228 frame_->Push(&value);
Steve Block6ded16b2010-05-10 14:33:55 +01008229 } else {
8230 Load(node->expression());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01008231 bool can_overwrite = node->expression()->ResultOverwriteAllowed();
Leon Clarkeac952652010-07-15 11:15:24 +01008232 UnaryOverwriteMode overwrite =
8233 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
8234 bool no_negative_zero = node->expression()->no_negative_zero();
Steve Block6ded16b2010-05-10 14:33:55 +01008235 switch (op) {
8236 case Token::NOT:
8237 case Token::DELETE:
8238 case Token::TYPEOF:
8239 UNREACHABLE(); // handled above
8240 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008241
Steve Block6ded16b2010-05-10 14:33:55 +01008242 case Token::SUB: {
Leon Clarkeac952652010-07-15 11:15:24 +01008243 GenericUnaryOpStub stub(
8244 Token::SUB,
8245 overwrite,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01008246 NO_UNARY_FLAGS,
Leon Clarkeac952652010-07-15 11:15:24 +01008247 no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
Steve Block6ded16b2010-05-10 14:33:55 +01008248 Result operand = frame_->Pop();
8249 Result answer = frame_->CallStub(&stub, &operand);
8250 answer.set_type_info(TypeInfo::Number());
8251 frame_->Push(&answer);
8252 break;
8253 }
8254 case Token::BIT_NOT: {
8255 // Smi check.
8256 JumpTarget smi_label;
8257 JumpTarget continue_label;
8258 Result operand = frame_->Pop();
8259 TypeInfo operand_info = operand.type_info();
8260 operand.ToRegister();
8261 if (operand_info.IsSmi()) {
8262 if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
8263 frame_->Spill(operand.reg());
8264 // Set smi tag bit. It will be reset by the not operation.
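          // (Inferred arithmetic: a smi n is encoded as n << 1 with a zero
          // tag bit, so after lea sets the tag bit, not_ computes
          // ~((n << 1) | 1) == (~n) << 1, which is already the smi for ~n.)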
8265 __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
8266 __ not_(operand.reg());
8267 Result answer = operand;
8268 answer.set_type_info(TypeInfo::Smi());
8269 frame_->Push(&answer);
8270 } else {
8271 __ test(operand.reg(), Immediate(kSmiTagMask));
8272 smi_label.Branch(zero, &operand, taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00008273
Kristian Monsen0d5e1162010-09-30 15:31:59 +01008274 GenericUnaryOpStub stub(Token::BIT_NOT,
8275 overwrite,
8276 NO_UNARY_SMI_CODE_IN_STUB);
Steve Block6ded16b2010-05-10 14:33:55 +01008277 Result answer = frame_->CallStub(&stub, &operand);
8278 continue_label.Jump(&answer);
Leon Clarkee46be812010-01-19 14:06:41 +00008279
Steve Block6ded16b2010-05-10 14:33:55 +01008280 smi_label.Bind(&answer);
8281 answer.ToRegister();
8282 frame_->Spill(answer.reg());
8283 // Set smi tag bit. It will be reset by the not operation.
8284 __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
8285 __ not_(answer.reg());
Leon Clarkee46be812010-01-19 14:06:41 +00008286
Steve Block6ded16b2010-05-10 14:33:55 +01008287 continue_label.Bind(&answer);
8288 answer.set_type_info(TypeInfo::Integer32());
8289 frame_->Push(&answer);
8290 }
8291 break;
8292 }
8293 case Token::ADD: {
8294 // Smi check.
8295 JumpTarget continue_label;
8296 Result operand = frame_->Pop();
8297 TypeInfo operand_info = operand.type_info();
8298 operand.ToRegister();
8299 __ test(operand.reg(), Immediate(kSmiTagMask));
8300 continue_label.Branch(zero, &operand, taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00008301
Steve Block6ded16b2010-05-10 14:33:55 +01008302 frame_->Push(&operand);
8303 Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
Steve Blocka7e24c12009-10-30 11:49:00 +00008304 CALL_FUNCTION, 1);
8305
Steve Block6ded16b2010-05-10 14:33:55 +01008306 continue_label.Bind(&answer);
8307 if (operand_info.IsSmi()) {
8308 answer.set_type_info(TypeInfo::Smi());
8309 } else if (operand_info.IsInteger32()) {
8310 answer.set_type_info(TypeInfo::Integer32());
8311 } else {
8312 answer.set_type_info(TypeInfo::Number());
8313 }
8314 frame_->Push(&answer);
8315 break;
8316 }
8317 default:
8318 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00008319 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008320 }
8321 }
8322}
8323
8324
8325// The value in dst was optimistically incremented or decremented. The
8326// result overflowed or was not smi tagged. Undo the operation, call
8327// into the runtime to convert the argument to a number, and call the
8328// specialized add or subtract stub. The result is left in dst.
8329class DeferredPrefixCountOperation: public DeferredCode {
8330 public:
Steve Block6ded16b2010-05-10 14:33:55 +01008331 DeferredPrefixCountOperation(Register dst,
8332 bool is_increment,
8333 TypeInfo input_type)
8334 : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008335 set_comment("[ DeferredCountOperation");
8336 }
8337
8338 virtual void Generate();
8339
8340 private:
8341 Register dst_;
8342 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01008343 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008344};
8345
8346
8347void DeferredPrefixCountOperation::Generate() {
8348 // Undo the optimistic smi operation.
8349 if (is_increment_) {
8350 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
8351 } else {
8352 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
8353 }
Steve Block6ded16b2010-05-10 14:33:55 +01008354 Register left;
8355 if (input_type_.IsNumber()) {
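    // (Assumption from the stub's use below: GenericBinaryOpStub accepts
    // smis and heap numbers directly, so a known-number input can skip the
    // TO_NUMBER conversion.)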
8356 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008357 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008358 __ push(dst_);
8359 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
8360 left = eax;
Steve Blocka7e24c12009-10-30 11:49:00 +00008361 }
Steve Block6ded16b2010-05-10 14:33:55 +01008362
8363 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
8364 NO_OVERWRITE,
8365 NO_GENERIC_BINARY_FLAGS,
8366 TypeInfo::Number());
8367 stub.GenerateCall(masm_, left, Smi::FromInt(1));
8368
Steve Blocka7e24c12009-10-30 11:49:00 +00008369 if (!dst_.is(eax)) __ mov(dst_, eax);
8370}
8371
8372
8373// The value in dst was optimistically incremented or decremented. The
8374// result overflowed or was not smi tagged. Undo the operation and call
8375// into the runtime to convert the argument to a number. Update the
8376// original value in old. Call the specialized add or subtract stub.
8377// The result is left in dst.
8378class DeferredPostfixCountOperation: public DeferredCode {
8379 public:
Steve Block6ded16b2010-05-10 14:33:55 +01008380 DeferredPostfixCountOperation(Register dst,
8381 Register old,
8382 bool is_increment,
8383 TypeInfo input_type)
8384 : dst_(dst),
8385 old_(old),
8386 is_increment_(is_increment),
8387 input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008388 set_comment("[ DeferredCountOperation");
8389 }
8390
8391 virtual void Generate();
8392
8393 private:
8394 Register dst_;
8395 Register old_;
8396 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01008397 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008398};
8399
8400
8401void DeferredPostfixCountOperation::Generate() {
8402 // Undo the optimistic smi operation.
8403 if (is_increment_) {
8404 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
8405 } else {
8406 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
8407 }
Steve Block6ded16b2010-05-10 14:33:55 +01008408 Register left;
8409 if (input_type_.IsNumber()) {
8410 __ push(dst_); // Save the input to use as the old value.
8411 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008412 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008413 __ push(dst_);
8414 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
8415 __ push(eax); // Save the result of ToNumber to use as the old value.
8416 left = eax;
Steve Blocka7e24c12009-10-30 11:49:00 +00008417 }
Steve Block6ded16b2010-05-10 14:33:55 +01008418
8419 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
8420 NO_OVERWRITE,
8421 NO_GENERIC_BINARY_FLAGS,
8422 TypeInfo::Number());
8423 stub.GenerateCall(masm_, left, Smi::FromInt(1));
8424
Steve Blocka7e24c12009-10-30 11:49:00 +00008425 if (!dst_.is(eax)) __ mov(dst_, eax);
8426 __ pop(old_);
8427}
8428
8429
8430void CodeGenerator::VisitCountOperation(CountOperation* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008431 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008432 Comment cmnt(masm_, "[ CountOperation");
8433
8434 bool is_postfix = node->is_postfix();
8435 bool is_increment = node->op() == Token::INC;
8436
8437 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
8438 bool is_const = (var != NULL && var->mode() == Variable::CONST);
8439
8440 // Postfix operations need a stack slot under the reference to hold
8441 // the old value while the new value is being stored. This is so that
8442 // in the case that storing the new value requires a call, the old
8443 // value will be in the frame to be spilled.
8444 if (is_postfix) frame_->Push(Smi::FromInt(0));
8445
Leon Clarked91b9f72010-01-27 17:25:45 +00008446  // A constant reference is never stored to, so it does not need to be
 8447  // treated as a compound assignment reference.
8448 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00008449 if (target.is_illegal()) {
8450 // Spoof the virtual frame to have the expected height (one higher
8451 // than on entry).
8452 if (!is_postfix) frame_->Push(Smi::FromInt(0));
8453 return;
8454 }
Steve Blockd0582a62009-12-15 09:54:21 +00008455 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00008456
8457 Result new_value = frame_->Pop();
8458 new_value.ToRegister();
8459
8460 Result old_value; // Only allocated in the postfix case.
8461 if (is_postfix) {
8462 // Allocate a temporary to preserve the old value.
8463 old_value = allocator_->Allocate();
8464 ASSERT(old_value.is_valid());
8465 __ mov(old_value.reg(), new_value.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01008466
8467 // The return value for postfix operations is ToNumber(input).
8468 // Keep more precise type info if the input is some kind of
8469 // number already. If the input is not a number we have to wait
8470 // for the deferred code to convert it.
8471 if (new_value.type_info().IsNumber()) {
8472 old_value.set_type_info(new_value.type_info());
8473 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008474 }
Steve Block6ded16b2010-05-10 14:33:55 +01008475
Steve Blocka7e24c12009-10-30 11:49:00 +00008476 // Ensure the new value is writable.
8477 frame_->Spill(new_value.reg());
8478
Steve Block6ded16b2010-05-10 14:33:55 +01008479 Result tmp;
8480 if (new_value.is_smi()) {
8481 if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00008482 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008483 // We don't know statically if the input is a smi.
8484 // In order to combine the overflow and the smi tag check, we need
8485 // to be able to allocate a byte register. We attempt to do so
8486 // without spilling. If we fail, we will generate separate overflow
8487 // and smi tag checks.
8488 // We allocate and clear a temporary byte register before performing
8489 // the count operation since clearing the register using xor will clear
8490 // the overflow flag.
8491 tmp = allocator_->AllocateByteRegisterWithoutSpilling();
8492 if (tmp.is_valid()) {
8493 __ Set(tmp.reg(), Immediate(0));
8494 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008495 }
8496
8497 if (is_increment) {
8498 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
8499 } else {
8500 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
8501 }
8502
Steve Block6ded16b2010-05-10 14:33:55 +01008503 DeferredCode* deferred = NULL;
8504 if (is_postfix) {
8505 deferred = new DeferredPostfixCountOperation(new_value.reg(),
8506 old_value.reg(),
8507 is_increment,
8508 new_value.type_info());
8509 } else {
8510 deferred = new DeferredPrefixCountOperation(new_value.reg(),
8511 is_increment,
8512 new_value.type_info());
8513 }
8514
8515 if (new_value.is_smi()) {
8516 // In case we have a smi as input just check for overflow.
8517 deferred->Branch(overflow);
8518 } else {
8519 // If the count operation didn't overflow and the result is a valid
8520 // smi, we're done. Otherwise, we jump to the deferred slow-case
8521 // code.
Steve Blocka7e24c12009-10-30 11:49:00 +00008522 // We combine the overflow and the smi tag check if we could
8523 // successfully allocate a temporary byte register.
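      // (How the combined check works: setcc writes 1 into the zeroed byte
      // register on overflow; or-ing in the result value then makes bit 0
      // nonzero iff overflow occurred or the smi tag bit is set, so a single
      // test against kSmiTagMask covers both failure cases.)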
Steve Block6ded16b2010-05-10 14:33:55 +01008524 if (tmp.is_valid()) {
8525 __ setcc(overflow, tmp.reg());
8526 __ or_(Operand(tmp.reg()), new_value.reg());
8527 __ test(tmp.reg(), Immediate(kSmiTagMask));
8528 tmp.Unuse();
8529 deferred->Branch(not_zero);
8530 } else {
8531 // Otherwise we test separately for overflow and smi tag.
8532 deferred->Branch(overflow);
8533 __ test(new_value.reg(), Immediate(kSmiTagMask));
8534 deferred->Branch(not_zero);
8535 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008536 }
8537 deferred->BindExit();
8538
Steve Block6ded16b2010-05-10 14:33:55 +01008539 // Postfix count operations return their input converted to
8540 // number. The case when the input is already a number is covered
8541 // above in the allocation code for old_value.
8542 if (is_postfix && !new_value.type_info().IsNumber()) {
8543 old_value.set_type_info(TypeInfo::Number());
8544 }
8545
8546 // The result of ++ or -- is an Integer32 if the
8547 // input is a smi. Otherwise it is a number.
8548 if (new_value.is_smi()) {
8549 new_value.set_type_info(TypeInfo::Integer32());
8550 } else {
8551 new_value.set_type_info(TypeInfo::Number());
8552 }
8553
Steve Blocka7e24c12009-10-30 11:49:00 +00008554 // Postfix: store the old value in the allocated slot under the
8555 // reference.
8556 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
8557
8558 frame_->Push(&new_value);
8559 // Non-constant: update the reference.
8560 if (!is_const) target.SetValue(NOT_CONST_INIT);
8561 }
8562
8563 // Postfix: drop the new value and use the old.
8564 if (is_postfix) frame_->Drop();
8565}
8566
8567
Steve Block6ded16b2010-05-10 14:33:55 +01008568void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008569 Token::Value op = node->op();
Steve Block6ded16b2010-05-10 14:33:55 +01008570 Comment cmnt(masm_, "[ Int32BinaryOperation");
8571 ASSERT(in_safe_int32_mode());
8572 ASSERT(safe_int32_mode_enabled());
8573 ASSERT(FLAG_safe_int32_compiler);
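  // (Context, inferred: in safe int32 mode operands are untagged raw 32-bit
  // values, and any case whose result might not be a valid int32 branches to
  // unsafe_bailout_.)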
Steve Blocka7e24c12009-10-30 11:49:00 +00008574
Steve Block6ded16b2010-05-10 14:33:55 +01008575 if (op == Token::COMMA) {
8576 // Discard left value.
8577 frame_->Nip(1);
8578 return;
8579 }
8580
8581 Result right = frame_->Pop();
8582 Result left = frame_->Pop();
8583
8584 ASSERT(right.is_untagged_int32());
8585 ASSERT(left.is_untagged_int32());
8586 // Registers containing an int32 value are not multiply used.
8587 ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
8588 ASSERT(!right.is_register() || !frame_->is_used(right.reg()));
8589
8590 switch (op) {
8591 case Token::COMMA:
8592 case Token::OR:
8593 case Token::AND:
8594 UNREACHABLE();
8595 break;
8596 case Token::BIT_OR:
8597 case Token::BIT_XOR:
8598 case Token::BIT_AND:
8599 if (left.is_constant() || right.is_constant()) {
8600 int32_t value; // Put constant in value, non-constant in left.
8601 // Constants are known to be int32 values, from static analysis,
8602 // or else will be converted to int32 by implicit ECMA [[ToInt32]].
8603 if (left.is_constant()) {
8604 ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
8605 value = NumberToInt32(*left.handle());
8606 left = right;
8607 } else {
8608 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
8609 value = NumberToInt32(*right.handle());
8610 }
8611
8612 left.ToRegister();
8613 if (op == Token::BIT_OR) {
8614 __ or_(Operand(left.reg()), Immediate(value));
8615 } else if (op == Token::BIT_XOR) {
8616 __ xor_(Operand(left.reg()), Immediate(value));
8617 } else {
8618 ASSERT(op == Token::BIT_AND);
8619 __ and_(Operand(left.reg()), Immediate(value));
8620 }
8621 } else {
8622 ASSERT(left.is_register());
8623 ASSERT(right.is_register());
8624 if (op == Token::BIT_OR) {
8625 __ or_(left.reg(), Operand(right.reg()));
8626 } else if (op == Token::BIT_XOR) {
8627 __ xor_(left.reg(), Operand(right.reg()));
8628 } else {
8629 ASSERT(op == Token::BIT_AND);
8630 __ and_(left.reg(), Operand(right.reg()));
8631 }
8632 }
8633 frame_->Push(&left);
8634 right.Unuse();
8635 break;
8636 case Token::SAR:
8637 case Token::SHL:
8638 case Token::SHR: {
8639 bool test_shr_overflow = false;
8640 left.ToRegister();
8641 if (right.is_constant()) {
8642 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
8643 int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
8644 if (op == Token::SAR) {
8645 __ sar(left.reg(), shift_amount);
8646 } else if (op == Token::SHL) {
8647 __ shl(left.reg(), shift_amount);
8648 } else {
8649 ASSERT(op == Token::SHR);
8650 __ shr(left.reg(), shift_amount);
8651 if (shift_amount == 0) test_shr_overflow = true;
8652 }
8653 } else {
 8654        // Move right into ecx; variable shift counts must be in cl.
8655 if (left.is_register() && left.reg().is(ecx)) {
8656 right.ToRegister();
8657 __ xchg(left.reg(), right.reg());
 8658          left = right;  // After the xchg, right's register holds the value.
8659 } else {
8660 right.ToRegister(ecx);
8661 left.ToRegister();
8662 }
8663 if (op == Token::SAR) {
8664 __ sar_cl(left.reg());
8665 } else if (op == Token::SHL) {
8666 __ shl_cl(left.reg());
8667 } else {
8668 ASSERT(op == Token::SHR);
8669 __ shr_cl(left.reg());
8670 test_shr_overflow = true;
8671 }
8672 }
8673 {
8674 Register left_reg = left.reg();
8675 frame_->Push(&left);
8676 right.Unuse();
8677 if (test_shr_overflow && !node->to_int32()) {
8678 // Uint32 results with top bit set are not Int32 values.
8679 // If they will be forced to Int32, skip the test.
8680 // Test is needed because shr with shift amount 0 does not set flags.
8681 __ test(left_reg, Operand(left_reg));
8682 unsafe_bailout_->Branch(sign);
8683 }
8684 }
8685 break;
8686 }
8687 case Token::ADD:
8688 case Token::SUB:
8689 case Token::MUL:
8690 if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
8691 int32_t value; // Put constant in value, non-constant in left.
8692 if (right.is_constant()) {
8693 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
8694 value = NumberToInt32(*right.handle());
8695 } else {
8696 ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
8697 value = NumberToInt32(*left.handle());
8698 left = right;
8699 }
8700
8701 left.ToRegister();
8702 if (op == Token::ADD) {
8703 __ add(Operand(left.reg()), Immediate(value));
8704 } else if (op == Token::SUB) {
8705 __ sub(Operand(left.reg()), Immediate(value));
8706 } else {
8707 ASSERT(op == Token::MUL);
8708 __ imul(left.reg(), left.reg(), value);
8709 }
8710 } else {
8711 left.ToRegister();
8712 ASSERT(left.is_register());
8713 ASSERT(right.is_register());
8714 if (op == Token::ADD) {
8715 __ add(left.reg(), Operand(right.reg()));
8716 } else if (op == Token::SUB) {
8717 __ sub(left.reg(), Operand(right.reg()));
8718 } else {
8719 ASSERT(op == Token::MUL);
8720 // We have statically verified that a negative zero can be ignored.
8721 __ imul(left.reg(), Operand(right.reg()));
8722 }
8723 }
8724 right.Unuse();
8725 frame_->Push(&left);
Russell Brenner90bac252010-11-18 13:33:46 -08008726 if (!node->to_int32() || op == Token::MUL) {
 8727      // If ToInt32 is applied to the result of ADD or SUB we don't
Steve Block6ded16b2010-05-10 14:33:55 +01008728      // care about overflows.
Russell Brenner90bac252010-11-18 13:33:46 -08008729      // The result of MUL, however, may not be precisely representable
 8730      // as a double, so it always has to be checked for overflow.
Steve Block6ded16b2010-05-10 14:33:55 +01008731 unsafe_bailout_->Branch(overflow);
8732 }
8733 break;
8734 case Token::DIV:
8735 case Token::MOD: {
8736 if (right.is_register() && (right.reg().is(eax) || right.reg().is(edx))) {
8737 if (left.is_register() && left.reg().is(edi)) {
8738 right.ToRegister(ebx);
8739 } else {
8740 right.ToRegister(edi);
8741 }
8742 }
8743 left.ToRegister(eax);
8744 Result edx_reg = allocator_->Allocate(edx);
8745 right.ToRegister();
8746 // The results are unused here because BreakTarget::Branch cannot handle
8747 // live results.
8748 Register right_reg = right.reg();
8749 left.Unuse();
8750 right.Unuse();
8751 edx_reg.Unuse();
8752 __ cmp(right_reg, 0);
8753 // Ensure divisor is positive: no chance of non-int32 or -0 result.
8754 unsafe_bailout_->Branch(less_equal);
8755 __ cdq(); // Sign-extend eax into edx:eax
8756 __ idiv(right_reg);
8757 if (op == Token::MOD) {
 8758        // Negative zero can arise from a negative dividend with a zero result.
8759 if (!node->no_negative_zero()) {
8760 Label not_negative_zero;
8761 __ test(edx, Operand(edx));
8762 __ j(not_zero, &not_negative_zero);
8763 __ test(eax, Operand(eax));
8764 unsafe_bailout_->Branch(negative);
8765 __ bind(&not_negative_zero);
8766 }
8767 Result edx_result(edx, TypeInfo::Integer32());
8768 edx_result.set_untagged_int32(true);
8769 frame_->Push(&edx_result);
8770 } else {
8771 ASSERT(op == Token::DIV);
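        // A nonzero remainder in edx means the exact quotient is not an
        // int32.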
8772 __ test(edx, Operand(edx));
8773 unsafe_bailout_->Branch(not_equal);
8774 Result eax_result(eax, TypeInfo::Integer32());
8775 eax_result.set_untagged_int32(true);
8776 frame_->Push(&eax_result);
8777 }
8778 break;
8779 }
8780 default:
8781 UNREACHABLE();
8782 break;
8783 }
8784}
8785
8786
8787void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008788 // According to ECMA-262 section 11.11, page 58, the binary logical
8789 // operators must yield the result of one of the two expressions
8790 // before any ToBoolean() conversions. This means that the value
8791 // produced by a && or || operator is not necessarily a boolean.
8792
8793 // NOTE: If the left hand side produces a materialized value (not
8794 // control flow), we force the right hand side to do the same. This
8795 // is necessary because we assume that if we get control flow on the
8796 // last path out of an expression we got it on all paths.
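  // (Illustrative example, not from the original: in `a && b`, if `a` is
  // falsy the expression evaluates to the value of `a` itself, not to the
  // boolean false; that is why the left value is kept on the frame until
  // ToBoolean decides whether to pop it.)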
Steve Block6ded16b2010-05-10 14:33:55 +01008797 if (node->op() == Token::AND) {
8798 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008799 JumpTarget is_true;
8800 ControlDestination dest(&is_true, destination()->false_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00008801 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008802
8803 if (dest.false_was_fall_through()) {
8804 // The current false target was used as the fall-through. If
8805 // there are no dangling jumps to is_true then the left
8806 // subexpression was unconditionally false. Otherwise we have
8807 // paths where we do have to evaluate the right subexpression.
8808 if (is_true.is_linked()) {
8809 // We need to compile the right subexpression. If the jump to
8810 // the current false target was a forward jump then we have a
8811 // valid frame, we have just bound the false target, and we
8812 // have to jump around the code for the right subexpression.
8813 if (has_valid_frame()) {
8814 destination()->false_target()->Unuse();
8815 destination()->false_target()->Jump();
8816 }
8817 is_true.Bind();
8818 // The left subexpression compiled to control flow, so the
8819 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008820 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008821 } else {
8822 // We have actually just jumped to or bound the current false
8823 // target but the current control destination is not marked as
8824 // used.
8825 destination()->Use(false);
8826 }
8827
8828 } else if (dest.is_used()) {
8829 // The left subexpression compiled to control flow (and is_true
8830 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008831 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008832
8833 } else {
8834 // We have a materialized value on the frame, so we exit with
8835 // one on all paths. There are possibly also jumps to is_true
8836 // from nested subexpressions.
8837 JumpTarget pop_and_continue;
8838 JumpTarget exit;
8839
8840 // Avoid popping the result if it converts to 'false' using the
8841 // standard ToBoolean() conversion as described in ECMA-262,
8842 // section 9.2, page 30.
8843 //
8844 // Duplicate the TOS value. The duplicate will be popped by
8845 // ToBoolean.
8846 frame_->Dup();
8847 ControlDestination dest(&pop_and_continue, &exit, true);
8848 ToBoolean(&dest);
8849
8850 // Pop the result of evaluating the first part.
8851 frame_->Drop();
8852
8853 // Compile right side expression.
8854 is_true.Bind();
8855 Load(node->right());
8856
8857 // Exit (always with a materialized value).
8858 exit.Bind();
8859 }
8860
Steve Block6ded16b2010-05-10 14:33:55 +01008861 } else {
8862 ASSERT(node->op() == Token::OR);
8863 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008864 JumpTarget is_false;
8865 ControlDestination dest(destination()->true_target(), &is_false, false);
Steve Blockd0582a62009-12-15 09:54:21 +00008866 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008867
8868 if (dest.true_was_fall_through()) {
8869 // The current true target was used as the fall-through. If
8870 // there are no dangling jumps to is_false then the left
8871 // subexpression was unconditionally true. Otherwise we have
8872 // paths where we do have to evaluate the right subexpression.
8873 if (is_false.is_linked()) {
8874 // We need to compile the right subexpression. If the jump to
8875 // the current true target was a forward jump then we have a
8876 // valid frame, we have just bound the true target, and we
8877 // have to jump around the code for the right subexpression.
8878 if (has_valid_frame()) {
8879 destination()->true_target()->Unuse();
8880 destination()->true_target()->Jump();
8881 }
8882 is_false.Bind();
8883 // The left subexpression compiled to control flow, so the
8884 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008885 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008886 } else {
8887 // We have just jumped to or bound the current true target but
8888 // the current control destination is not marked as used.
8889 destination()->Use(true);
8890 }
8891
8892 } else if (dest.is_used()) {
8893 // The left subexpression compiled to control flow (and is_false
8894 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008895 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008896
8897 } else {
8898 // We have a materialized value on the frame, so we exit with
8899 // one on all paths. There are possibly also jumps to is_false
8900 // from nested subexpressions.
8901 JumpTarget pop_and_continue;
8902 JumpTarget exit;
8903
8904 // Avoid popping the result if it converts to 'true' using the
8905 // standard ToBoolean() conversion as described in ECMA-262,
8906 // section 9.2, page 30.
8907 //
8908 // Duplicate the TOS value. The duplicate will be popped by
8909 // ToBoolean.
8910 frame_->Dup();
8911 ControlDestination dest(&exit, &pop_and_continue, false);
8912 ToBoolean(&dest);
8913
8914 // Pop the result of evaluating the first part.
8915 frame_->Drop();
8916
8917 // Compile right side expression.
8918 is_false.Bind();
8919 Load(node->right());
8920
8921 // Exit (always with a materialized value).
8922 exit.Bind();
8923 }
Steve Block6ded16b2010-05-10 14:33:55 +01008924 }
8925}
Steve Blocka7e24c12009-10-30 11:49:00 +00008926
Steve Block6ded16b2010-05-10 14:33:55 +01008927
8928void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
8929 Comment cmnt(masm_, "[ BinaryOperation");
8930
8931 if (node->op() == Token::AND || node->op() == Token::OR) {
8932 GenerateLogicalBooleanOperation(node);
8933 } else if (in_safe_int32_mode()) {
8934 Visit(node->left());
8935 Visit(node->right());
8936 Int32BinaryOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00008937 } else {
8938 // NOTE: The code below assumes that the slow cases (calls to runtime)
8939 // never return a constant/immutable object.
8940 OverwriteMode overwrite_mode = NO_OVERWRITE;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01008941 if (node->left()->ResultOverwriteAllowed()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008942 overwrite_mode = OVERWRITE_LEFT;
Kristian Monsen80d68ea2010-09-08 11:05:35 +01008943 } else if (node->right()->ResultOverwriteAllowed()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008944 overwrite_mode = OVERWRITE_RIGHT;
8945 }
8946
Steve Block6ded16b2010-05-10 14:33:55 +01008947 if (node->left()->IsTrivial()) {
8948 Load(node->right());
8949 Result right = frame_->Pop();
8950 frame_->Push(node->left());
8951 frame_->Push(&right);
8952 } else {
8953 Load(node->left());
8954 Load(node->right());
8955 }
8956 GenericBinaryOperation(node, overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00008957 }
8958}
8959
8960
8961void CodeGenerator::VisitThisFunction(ThisFunction* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008962 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008963 frame_->PushFunction();
8964}
8965
8966
8967void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008968 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008969 Comment cmnt(masm_, "[ CompareOperation");
8970
Leon Clarkee46be812010-01-19 14:06:41 +00008971 bool left_already_loaded = false;
8972
Steve Blocka7e24c12009-10-30 11:49:00 +00008973 // Get the expressions from the node.
8974 Expression* left = node->left();
8975 Expression* right = node->right();
8976 Token::Value op = node->op();
8977 // To make typeof testing for natives implemented in JavaScript really
8978 // efficient, we generate special code for expressions of the form:
8979 // 'typeof <expression> == <string>'.
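  // (Hypothetical example: `typeof x == "number"` compiles to a direct
  // smi-tag and map check on x instead of materializing the typeof string
  // and calling the generic comparison.)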
8980 UnaryOperation* operation = left->AsUnaryOperation();
8981 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
8982 (operation != NULL && operation->op() == Token::TYPEOF) &&
8983 (right->AsLiteral() != NULL &&
8984 right->AsLiteral()->handle()->IsString())) {
8985 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
8986
8987 // Load the operand and move it to a register.
8988 LoadTypeofExpression(operation->expression());
8989 Result answer = frame_->Pop();
8990 answer.ToRegister();
8991
8992 if (check->Equals(Heap::number_symbol())) {
8993 __ test(answer.reg(), Immediate(kSmiTagMask));
8994 destination()->true_target()->Branch(zero);
8995 frame_->Spill(answer.reg());
8996 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
8997 __ cmp(answer.reg(), Factory::heap_number_map());
8998 answer.Unuse();
8999 destination()->Split(equal);
9000
9001 } else if (check->Equals(Heap::string_symbol())) {
9002 __ test(answer.reg(), Immediate(kSmiTagMask));
9003 destination()->false_target()->Branch(zero);
9004
9005 // It can be an undetectable string object.
9006 Result temp = allocator()->Allocate();
9007 ASSERT(temp.is_valid());
9008 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009009 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
9010 1 << Map::kIsUndetectable);
Steve Blocka7e24c12009-10-30 11:49:00 +00009011 destination()->false_target()->Branch(not_zero);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009012 __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00009013 temp.Unuse();
9014 answer.Unuse();
Andrei Popescu402d9372010-02-26 13:31:12 +00009015 destination()->Split(below);
Steve Blocka7e24c12009-10-30 11:49:00 +00009016
9017 } else if (check->Equals(Heap::boolean_symbol())) {
9018 __ cmp(answer.reg(), Factory::true_value());
9019 destination()->true_target()->Branch(equal);
9020 __ cmp(answer.reg(), Factory::false_value());
9021 answer.Unuse();
9022 destination()->Split(equal);
9023
9024 } else if (check->Equals(Heap::undefined_symbol())) {
9025 __ cmp(answer.reg(), Factory::undefined_value());
9026 destination()->true_target()->Branch(equal);
9027
9028 __ test(answer.reg(), Immediate(kSmiTagMask));
9029 destination()->false_target()->Branch(zero);
9030
9031 // It can be an undetectable object.
9032 frame_->Spill(answer.reg());
9033 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009034 __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
9035 1 << Map::kIsUndetectable);
Steve Blocka7e24c12009-10-30 11:49:00 +00009036 answer.Unuse();
9037 destination()->Split(not_zero);
9038
9039 } else if (check->Equals(Heap::function_symbol())) {
9040 __ test(answer.reg(), Immediate(kSmiTagMask));
9041 destination()->false_target()->Branch(zero);
9042 frame_->Spill(answer.reg());
9043 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
Steve Blockd0582a62009-12-15 09:54:21 +00009044 destination()->true_target()->Branch(equal);
9045 // Regular expressions are callable so typeof == 'function'.
9046 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00009047 answer.Unuse();
9048 destination()->Split(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00009049 } else if (check->Equals(Heap::object_symbol())) {
9050 __ test(answer.reg(), Immediate(kSmiTagMask));
9051 destination()->false_target()->Branch(zero);
9052 __ cmp(answer.reg(), Factory::null_value());
9053 destination()->true_target()->Branch(equal);
9054
Steve Blocka7e24c12009-10-30 11:49:00 +00009055 Result map = allocator()->Allocate();
9056 ASSERT(map.is_valid());
Steve Blockd0582a62009-12-15 09:54:21 +00009057 // Regular expressions are typeof == 'function', not 'object'.
9058 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
9059 destination()->false_target()->Branch(equal);
9060
9061 // It can be an undetectable object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009062 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
9063 1 << Map::kIsUndetectable);
Steve Blocka7e24c12009-10-30 11:49:00 +00009064 destination()->false_target()->Branch(not_zero);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009065 // Do a range test for JSObject type. We can't use
9066 // MacroAssembler::IsInstanceJSObjectType, because we are using a
9067 // ControlDestination, so we copy its implementation here.
Steve Blocka7e24c12009-10-30 11:49:00 +00009068 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009069 __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
9070 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00009071 answer.Unuse();
9072 map.Unuse();
Leon Clarkef7060e22010-06-03 12:02:55 +01009073 destination()->Split(below_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00009074 } else {
9075 // Uncommon case: typeof testing against a string literal that is
9076 // never returned from the typeof operator.
9077 answer.Unuse();
9078 destination()->Goto(false);
9079 }
9080 return;
Leon Clarkee46be812010-01-19 14:06:41 +00009081 } else if (op == Token::LT &&
9082 right->AsLiteral() != NULL &&
9083 right->AsLiteral()->handle()->IsHeapNumber()) {
9084 Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
9085 if (check->value() == 2147483648.0) { // 0x80000000.
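      // (Inferred from the code below: every smi is below 2^31, so only heap
      // numbers need a real check. A signed comparison of the double's high
      // word against the exponent bits of 2^31 decides the result; negative
      // finite numbers have the sign bit set and therefore also compare as
      // less.)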
9086 Load(left);
9087 left_already_loaded = true;
9088 Result lhs = frame_->Pop();
9089 lhs.ToRegister();
9090 __ test(lhs.reg(), Immediate(kSmiTagMask));
 9091      destination()->true_target()->Branch(zero);  // All smis are below 2^31.
9092 Result scratch = allocator()->Allocate();
9093 ASSERT(scratch.is_valid());
9094 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
9095 __ cmp(scratch.reg(), Factory::heap_number_map());
9096 JumpTarget not_a_number;
9097 not_a_number.Branch(not_equal, &lhs);
9098 __ mov(scratch.reg(),
9099 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
9100 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
9101 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf.
9102 const uint32_t borderline_exponent =
9103 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
9104 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
9105 scratch.Unuse();
9106 lhs.Unuse();
9107 destination()->true_target()->Branch(less);
9108 destination()->false_target()->Jump();
9109
9110 not_a_number.Bind(&lhs);
9111 frame_->Push(&lhs);
9112 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009113 }
9114
9115 Condition cc = no_condition;
9116 bool strict = false;
9117 switch (op) {
9118 case Token::EQ_STRICT:
9119 strict = true;
9120 // Fall through
9121 case Token::EQ:
9122 cc = equal;
9123 break;
9124 case Token::LT:
9125 cc = less;
9126 break;
9127 case Token::GT:
9128 cc = greater;
9129 break;
9130 case Token::LTE:
9131 cc = less_equal;
9132 break;
9133 case Token::GTE:
9134 cc = greater_equal;
9135 break;
9136 case Token::IN: {
Leon Clarkee46be812010-01-19 14:06:41 +00009137 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00009138 Load(right);
9139 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
9140 frame_->Push(&answer); // push the result
9141 return;
9142 }
9143 case Token::INSTANCEOF: {
Leon Clarkee46be812010-01-19 14:06:41 +00009144 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00009145 Load(right);
Ben Murdochb0fe1622011-05-05 13:52:32 +01009146 InstanceofStub stub(InstanceofStub::kNoFlags);
Steve Blocka7e24c12009-10-30 11:49:00 +00009147 Result answer = frame_->CallStub(&stub, 2);
9148 answer.ToRegister();
9149 __ test(answer.reg(), Operand(answer.reg()));
9150 answer.Unuse();
9151 destination()->Split(zero);
9152 return;
9153 }
9154 default:
9155 UNREACHABLE();
9156 }
Steve Block6ded16b2010-05-10 14:33:55 +01009157
9158 if (left->IsTrivial()) {
9159 if (!left_already_loaded) {
9160 Load(right);
9161 Result right_result = frame_->Pop();
9162 frame_->Push(left);
9163 frame_->Push(&right_result);
9164 } else {
9165 Load(right);
9166 }
9167 } else {
9168 if (!left_already_loaded) Load(left);
9169 Load(right);
9170 }
Leon Clarkee46be812010-01-19 14:06:41 +00009171 Comparison(node, cc, strict, destination());
Steve Blocka7e24c12009-10-30 11:49:00 +00009172}
9173
9174
Kristian Monsen80d68ea2010-09-08 11:05:35 +01009175void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
9176 ASSERT(!in_safe_int32_mode());
9177 Comment cmnt(masm_, "[ CompareToNull");
9178
9179 Load(node->expression());
9180 Result operand = frame_->Pop();
9181 operand.ToRegister();
9182 __ cmp(operand.reg(), Factory::null_value());
9183 if (node->is_strict()) {
9184 operand.Unuse();
9185 destination()->Split(equal);
9186 } else {
9187 // The 'null' value is only equal to 'undefined' if using non-strict
9188 // comparisons.
9189 destination()->true_target()->Branch(equal);
9190 __ cmp(operand.reg(), Factory::undefined_value());
9191 destination()->true_target()->Branch(equal);
9192 __ test(operand.reg(), Immediate(kSmiTagMask));
9193 destination()->false_target()->Branch(equal);
9194
9195 // It can be an undetectable object.
9196 // Use a scratch register in preference to spilling operand.reg().
9197 Result temp = allocator()->Allocate();
9198 ASSERT(temp.is_valid());
9199 __ mov(temp.reg(),
9200 FieldOperand(operand.reg(), HeapObject::kMapOffset));
9201 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
9202 1 << Map::kIsUndetectable);
9203 temp.Unuse();
9204 operand.Unuse();
9205 destination()->Split(not_zero);
9206 }
9207}
9208
9209
Steve Blocka7e24c12009-10-30 11:49:00 +00009210#ifdef DEBUG
9211bool CodeGenerator::HasValidEntryRegisters() {
9212 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
9213 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
9214 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
9215 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
9216 && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
9217}
9218#endif
9219
9220
9221// Emit a LoadIC call to get the value from receiver and leave it in
Andrei Popescu402d9372010-02-26 13:31:12 +00009222// dst.
Steve Blocka7e24c12009-10-30 11:49:00 +00009223class DeferredReferenceGetNamedValue: public DeferredCode {
9224 public:
9225 DeferredReferenceGetNamedValue(Register dst,
9226 Register receiver,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009227 Handle<String> name,
9228 bool is_contextual)
9229 : dst_(dst),
9230 receiver_(receiver),
9231 name_(name),
Ben Murdochf87a2032010-10-22 12:50:53 +01009232 is_contextual_(is_contextual),
9233 is_dont_delete_(false) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009234 set_comment(is_contextual
9235 ? "[ DeferredReferenceGetNamedValue (contextual)"
9236 : "[ DeferredReferenceGetNamedValue");
Steve Blocka7e24c12009-10-30 11:49:00 +00009237 }
9238
9239 virtual void Generate();
9240
9241 Label* patch_site() { return &patch_site_; }
9242
Ben Murdochf87a2032010-10-22 12:50:53 +01009243 void set_is_dont_delete(bool value) {
9244 ASSERT(is_contextual_);
9245 is_dont_delete_ = value;
9246 }
9247
Steve Blocka7e24c12009-10-30 11:49:00 +00009248 private:
9249 Label patch_site_;
9250 Register dst_;
9251 Register receiver_;
9252 Handle<String> name_;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009253 bool is_contextual_;
Ben Murdochf87a2032010-10-22 12:50:53 +01009254 bool is_dont_delete_;
Steve Blocka7e24c12009-10-30 11:49:00 +00009255};
9256
9257
9258void DeferredReferenceGetNamedValue::Generate() {
Andrei Popescu402d9372010-02-26 13:31:12 +00009259 if (!receiver_.is(eax)) {
9260 __ mov(eax, receiver_);
9261 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009262 __ Set(ecx, Immediate(name_));
9263 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009264 RelocInfo::Mode mode = is_contextual_
9265 ? RelocInfo::CODE_TARGET_CONTEXT
9266 : RelocInfo::CODE_TARGET;
9267 __ call(ic, mode);
 9268  // The call must be followed by exactly one of:
9269 // - a test eax instruction to indicate that the inobject property
9270 // case was inlined.
Ben Murdochf87a2032010-10-22 12:50:53 +01009271 // - a mov ecx or mov edx instruction to indicate that the
9272 // contextual property load was inlined.
Steve Blocka7e24c12009-10-30 11:49:00 +00009273 //
9274 // Store the delta to the map check instruction here in the test
9275 // instruction. Use masm_-> instead of the __ macro since the
9276 // latter can't return a value.
9277 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9278 // Here we use masm_-> instead of the __ macro because this is the
9279 // instruction that gets patched and coverage code gets in the way.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009280 if (is_contextual_) {
Ben Murdochf87a2032010-10-22 12:50:53 +01009281 masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009282 __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
Ben Murdochf87a2032010-10-22 12:50:53 +01009283 if (is_dont_delete_) {
9284 __ IncrementCounter(&Counters::dont_delete_hint_miss, 1);
9285 }
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009286 } else {
9287 masm_->test(eax, Immediate(-delta_to_patch_site));
9288 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
9289 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009290
9291 if (!dst_.is(eax)) __ mov(dst_, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009292}
9293
9294
9295class DeferredReferenceGetKeyedValue: public DeferredCode {
9296 public:
9297 explicit DeferredReferenceGetKeyedValue(Register dst,
9298 Register receiver,
Andrei Popescu402d9372010-02-26 13:31:12 +00009299 Register key)
9300 : dst_(dst), receiver_(receiver), key_(key) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009301 set_comment("[ DeferredReferenceGetKeyedValue");
9302 }
9303
9304 virtual void Generate();
9305
9306 Label* patch_site() { return &patch_site_; }
9307
9308 private:
9309 Label patch_site_;
9310 Register dst_;
9311 Register receiver_;
9312 Register key_;
Steve Blocka7e24c12009-10-30 11:49:00 +00009313};
9314
9315
9316void DeferredReferenceGetKeyedValue::Generate() {
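  // (Inferred calling convention: the KeyedLoadIC stub takes the receiver in
  // edx and the key in eax; the moves below route both values into place
  // without clobbering either, using xchg when they are exactly swapped.)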
Andrei Popescu402d9372010-02-26 13:31:12 +00009317 if (!receiver_.is(eax)) {
9318 // Register eax is available for key.
9319 if (!key_.is(eax)) {
9320 __ mov(eax, key_);
9321 }
9322 if (!receiver_.is(edx)) {
9323 __ mov(edx, receiver_);
9324 }
9325 } else if (!key_.is(edx)) {
9326 // Register edx is available for receiver.
9327 if (!receiver_.is(edx)) {
9328 __ mov(edx, receiver_);
9329 }
9330 if (!key_.is(eax)) {
9331 __ mov(eax, key_);
9332 }
9333 } else {
9334 __ xchg(edx, eax);
9335 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009336 // Calculate the delta from the IC call instruction to the map check
9337 // cmp instruction in the inlined version. This delta is stored in
9338 // a test(eax, delta) instruction after the call so that we can find
9339 // it in the IC initialization code and patch the cmp instruction.
9340 // This means that we cannot allow test instructions after calls to
9341 // KeyedLoadIC stubs in other places.
9342 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
Andrei Popescu402d9372010-02-26 13:31:12 +00009343 __ call(ic, RelocInfo::CODE_TARGET);
Steve Blocka7e24c12009-10-30 11:49:00 +00009344 // The delta from the start of the map-compare instruction to the
9345 // test instruction. We use masm_-> directly here instead of the __
9346 // macro because the macro sometimes uses macro expansion to turn
9347 // into something that can't return a value. This is encountered
9348 // when doing generated code coverage tests.
9349 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9350 // Here we use masm_-> instead of the __ macro because this is the
9351 // instruction that gets patched and coverage code gets in the way.
9352 masm_->test(eax, Immediate(-delta_to_patch_site));
9353 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
9354
9355 if (!dst_.is(eax)) __ mov(dst_, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009356}
9357
9358
9359class DeferredReferenceSetKeyedValue: public DeferredCode {
9360 public:
9361 DeferredReferenceSetKeyedValue(Register value,
9362 Register key,
Steve Block6ded16b2010-05-10 14:33:55 +01009363 Register receiver,
9364 Register scratch)
9365 : value_(value),
9366 key_(key),
9367 receiver_(receiver),
9368 scratch_(scratch) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009369 set_comment("[ DeferredReferenceSetKeyedValue");
9370 }
9371
9372 virtual void Generate();
9373
9374 Label* patch_site() { return &patch_site_; }
9375
9376 private:
9377 Register value_;
9378 Register key_;
9379 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01009380 Register scratch_;
Steve Blocka7e24c12009-10-30 11:49:00 +00009381 Label patch_site_;
9382};
9383
9384
9385void DeferredReferenceSetKeyedValue::Generate() {
9386 __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
Steve Block6ded16b2010-05-10 14:33:55 +01009387 // Move value_ to eax, key_ to ecx, and receiver_ to edx.
9388 Register old_value = value_;
9389
9390 // First, move value to eax.
9391 if (!value_.is(eax)) {
9392 if (key_.is(eax)) {
9393 // Move key_ out of eax, preferably to ecx.
9394 if (!value_.is(ecx) && !receiver_.is(ecx)) {
9395 __ mov(ecx, key_);
9396 key_ = ecx;
9397 } else {
9398 __ mov(scratch_, key_);
9399 key_ = scratch_;
9400 }
9401 }
9402 if (receiver_.is(eax)) {
9403 // Move receiver_ out of eax, preferably to edx.
9404 if (!value_.is(edx) && !key_.is(edx)) {
9405 __ mov(edx, receiver_);
9406 receiver_ = edx;
9407 } else {
 9408        // Both moves to scratch come from eax, and no valid path performs both.
9409 __ mov(scratch_, receiver_);
9410 receiver_ = scratch_;
9411 }
9412 }
9413 __ mov(eax, value_);
9414 value_ = eax;
9415 }
9416
9417 // Now value_ is in eax. Move the other two to the right positions.
9418 // We do not update the variables key_ and receiver_ to ecx and edx.
9419 if (key_.is(ecx)) {
9420 if (!receiver_.is(edx)) {
9421 __ mov(edx, receiver_);
9422 }
9423 } else if (key_.is(edx)) {
9424 if (receiver_.is(ecx)) {
9425 __ xchg(edx, ecx);
9426 } else {
9427 __ mov(ecx, key_);
9428 if (!receiver_.is(edx)) {
9429 __ mov(edx, receiver_);
9430 }
9431 }
9432 } else { // Key is not in edx or ecx.
9433 if (!receiver_.is(edx)) {
9434 __ mov(edx, receiver_);
9435 }
9436 __ mov(ecx, key_);
9437 }
9438
Steve Blocka7e24c12009-10-30 11:49:00 +00009439 // Call the IC stub.
9440 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
9441 __ call(ic, RelocInfo::CODE_TARGET);
9442 // The delta from the start of the map-compare instruction to the
9443 // test instruction. We use masm_-> directly here instead of the
9444 // __ macro because the macro sometimes uses macro expansion to turn
9445 // into something that can't return a value. This is encountered
9446 // when doing generated code coverage tests.
9447 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9448 // Here we use masm_-> instead of the __ macro because this is the
9449 // instruction that gets patched and coverage code gets in the way.
9450 masm_->test(eax, Immediate(-delta_to_patch_site));
Steve Block6ded16b2010-05-10 14:33:55 +01009451 // Restore value (returned from store IC) register.
9452 if (!old_value.is(eax)) __ mov(old_value, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009453}
9454
9455
Andrei Popescu402d9372010-02-26 13:31:12 +00009456Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
9457#ifdef DEBUG
9458 int original_height = frame()->height();
9459#endif
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009460
9461 bool contextual_load_in_builtin =
9462 is_contextual &&
9463 (Bootstrapper::IsActive() ||
9464 (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
9465
Andrei Popescu402d9372010-02-26 13:31:12 +00009466 Result result;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009467  // Do not inline in global code or outside of loops.
9468 if (scope()->is_global_scope() ||
9469 loop_nesting() == 0 ||
9470 contextual_load_in_builtin) {
Andrei Popescu402d9372010-02-26 13:31:12 +00009471 Comment cmnt(masm(), "[ Load from named Property");
9472 frame()->Push(name);
9473
9474 RelocInfo::Mode mode = is_contextual
9475 ? RelocInfo::CODE_TARGET_CONTEXT
9476 : RelocInfo::CODE_TARGET;
9477 result = frame()->CallLoadIC(mode);
9478 // A test eax instruction following the call signals that the inobject
9479 // property case was inlined. Ensure that there is not a test eax
9480 // instruction here.
9481 __ nop();
9482 } else {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009483 // Inline the property load.
9484 Comment cmnt(masm(), is_contextual
9485 ? "[ Inlined contextual property load"
9486 : "[ Inlined named property load");
Andrei Popescu402d9372010-02-26 13:31:12 +00009487 Result receiver = frame()->Pop();
9488 receiver.ToRegister();
9489
9490 result = allocator()->Allocate();
9491 ASSERT(result.is_valid());
9492 DeferredReferenceGetNamedValue* deferred =
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009493 new DeferredReferenceGetNamedValue(result.reg(),
9494 receiver.reg(),
9495 name,
9496 is_contextual);
Andrei Popescu402d9372010-02-26 13:31:12 +00009497
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009498 if (!is_contextual) {
9499 // Check that the receiver is a heap object.
9500 __ test(receiver.reg(), Immediate(kSmiTagMask));
9501 deferred->Branch(zero);
9502 }
Andrei Popescu402d9372010-02-26 13:31:12 +00009503
9504 __ bind(deferred->patch_site());
9505 // This is the map check instruction that will be patched (so we can't
9506 // use the double underscore macro that may insert instructions).
9507 // Initially use an invalid map to force a failure.
9508 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
9509 Immediate(Factory::null_value()));
9510 // This branch is always a forwards branch so it's always a fixed size
9511 // which allows the assert below to succeed and patching to work.
9512 deferred->Branch(not_equal);
9513
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009514 // The delta from the patch label to the actual load must be
9515 // statically known.
Andrei Popescu402d9372010-02-26 13:31:12 +00009516 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
9517 LoadIC::kOffsetToLoadInstruction);
Andrei Popescu402d9372010-02-26 13:31:12 +00009518
Kristian Monsen0d5e1162010-09-30 15:31:59 +01009519 if (is_contextual) {
 9520      // Load the (initially invalid) cell and get its value.
9521 masm()->mov(result.reg(), Factory::null_value());
9522 if (FLAG_debug_code) {
9523 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
9524 Factory::global_property_cell_map());
9525 __ Assert(equal, "Uninitialized inlined contextual load");
9526 }
9527 __ mov(result.reg(),
9528 FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
Ben Murdochf87a2032010-10-22 12:50:53 +01009529 bool is_dont_delete = false;
9530 if (!info_->closure().is_null()) {
9531 // When doing lazy compilation we can check if the global cell
9532 // already exists and use its "don't delete" status as a hint.
9533 AssertNoAllocation no_gc;
9534 v8::internal::GlobalObject* global_object =
9535 info_->closure()->context()->global();
9536 LookupResult lookup;
9537 global_object->LocalLookupRealNamedProperty(*name, &lookup);
9538 if (lookup.IsProperty() && lookup.type() == NORMAL) {
9539 ASSERT(lookup.holder() == global_object);
9540 ASSERT(global_object->property_dictionary()->ValueAt(
9541 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
9542 is_dont_delete = lookup.IsDontDelete();
9543 }
9544 }
9545 deferred->set_is_dont_delete(is_dont_delete);
9546 if (!is_dont_delete) {
9547 __ cmp(result.reg(), Factory::the_hole_value());
9548 deferred->Branch(equal);
9549 } else if (FLAG_debug_code) {
9550 __ cmp(result.reg(), Factory::the_hole_value());
9551 __ Check(not_equal, "DontDelete cells can't contain the hole");
9552 }
      __ IncrementCounter(&Counters::named_load_global_inline, 1);
      if (is_dont_delete) {
        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1);
      }
    } else {
      // The initial (invalid) offset has to be large enough to force a 32-bit
      // instruction encoding to allow patching with an arbitrary offset. Use
      // kMaxInt (minus kHeapObjectTag).
      int offset = kMaxInt;
      masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
      __ IncrementCounter(&Counters::named_load_inline, 1);
    }

    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}


Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    result = frame()->CallStoreIC(name, is_contextual);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined. Ensure that there is not a test eax
    // instruction here.
    __ nop();
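    // (The patching code inspects the instruction following the call, so a
    // stray test eax here would be misread as an inlined store.)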
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;
    Label patch_site;

    // Get the value and receiver from the stack.
    Result value = frame()->Pop();
    value.ToRegister();
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    // Allocate result register.
    result = allocator()->Allocate();
    ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    slow.Branch(zero, &value, &receiver);

    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    __ bind(&patch_site);
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    slow.Branch(not_equal, &value, &receiver);

    // The delta from the patch label to the store offset must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
           StoreIC::kOffsetToStoreInstruction);
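    // As with inlined loads, the IC initialization code relies on this fixed
    // delta to locate and patch the store instruction emitted below.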

    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset. Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    __ mov(FieldOperand(receiver.reg(), offset), value.reg());
    __ mov(result.reg(), Operand(value.reg()));

    // Allocate scratch register for write barrier.
    Result scratch = allocator()->Allocate();
    ASSERT(scratch.is_valid());

    // The write barrier clobbers all input registers, so spill the
    // receiver and the value.
    frame_->Spill(receiver.reg());
    frame_->Spill(value.reg());

    // If the receiver and the value share a register allocate a new
    // register for the receiver.
    if (receiver.reg().is(value.reg())) {
      receiver = allocator()->Allocate();
      ASSERT(receiver.is_valid());
      __ mov(receiver.reg(), Operand(value.reg()));
    }

    // Update the write barrier. To save instructions in the inlined
    // version we do not filter smis.
    Label skip_write_barrier;
    __ InNewSpace(receiver.reg(), value.reg(), equal, &skip_write_barrier);
    int delta_to_record_write = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ lea(scratch.reg(), Operand(receiver.reg(), offset));
    __ RecordWriteHelper(receiver.reg(), scratch.reg(), value.reg());
    if (FLAG_debug_code) {
      __ mov(receiver.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(value.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(scratch.reg(), Immediate(BitCast<int32_t>(kZapValue)));
    }
    __ bind(&skip_write_barrier);
    value.Unuse();
    scratch.Unuse();
    receiver.Unuse();
    done.Jump(&result);

    slow.Bind(&value, &receiver);
    frame()->Push(&receiver);
    frame()->Push(&value);
    result = frame()->CallStoreIC(name, is_contextual);
    // Encode the offset to the map check instruction and the offset
    // to the write barrier store address computation in a test eax
    // instruction.
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ test(eax,
            Immediate((delta_to_record_write << 16) | delta_to_patch_site));
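    // The IC patching code looks for this marker after the call: the two
    // packed deltas let it locate both the map check and the write barrier's
    // address computation relative to the test instruction.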
    done.Bind(&result);
  }

  ASSERT_EQ(expected_height, frame()->height());
  return result;
}


Result CodeGenerator::EmitKeyedLoad() {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Inline array load code if inside of a loop. We do not know the
  // receiver map yet, so we initially generate the code with a check
  // against an invalid map. In the inline cache code, we patch the map
  // check if appropriate.
  if (loop_nesting() > 0) {
    Comment cmnt(masm_, "[ Inlined load from keyed Property");

    // Use a fresh temporary to load the elements without destroying
    // the receiver which is needed for the deferred slow case.
    Result elements = allocator()->Allocate();
    ASSERT(elements.is_valid());

    Result key = frame_->Pop();
    Result receiver = frame_->Pop();
    key.ToRegister();
    receiver.ToRegister();

    // If key and receiver are shared registers on the frame, their values
    // will be automatically saved and restored when going to deferred code.
    // The result is in elements, which is guaranteed non-shared.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(elements.reg(),
                                           receiver.reg(),
                                           key.reg());

    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the receiver has the expected map.
    // Initially, use an invalid map. The map is patched in the IC
    // initialization code.
    __ bind(deferred->patch_site());
    // Use masm-> here instead of the double underscore macro since extra
    // coverage code can interfere with the patching.
    masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
               Immediate(Factory::null_value()));
    deferred->Branch(not_equal);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Get the elements array from the receiver.
    __ mov(elements.reg(),
           FieldOperand(receiver.reg(), JSObject::kElementsOffset));
    __ AssertFastElements(elements.reg());

    // Check that the key is within bounds.
    __ cmp(key.reg(),
           FieldOperand(elements.reg(), FixedArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Load and check that the result is not the hole.
    // Key holds a smi.
    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
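    // A smi is the untagged value shifted left by one, so scaling the smi
    // key by times_2 yields index * 4, i.e. index * kPointerSize on ia32.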
    __ mov(elements.reg(),
           FieldOperand(elements.reg(),
                        key.reg(),
                        times_2,
                        FixedArray::kHeaderSize));
    result = elements;
    __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
    deferred->Branch(equal);
    __ IncrementCounter(&Counters::keyed_load_inline, 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load. The explicit nop instruction is here because
    // the push that follows might be peep-hole optimized away.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}


Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    Comment cmnt(masm(), "[ Inlined store to keyed Property");

    // Get the receiver, key and value into registers.
    result = frame()->Pop();
    Result key = frame()->Pop();
    Result receiver = frame()->Pop();

    Result tmp = allocator_->Allocate();
    ASSERT(tmp.is_valid());
    Result tmp2 = allocator_->Allocate();
    ASSERT(tmp2.is_valid());

    // Determine whether the value is a constant before putting it in a
    // register.
    bool value_is_constant = result.is_constant();

    // Make sure that value, key and receiver are in registers.
    result.ToRegister();
    key.ToRegister();
    receiver.ToRegister();

    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(result.reg(),
                                           key.reg(),
                                           receiver.reg(),
                                           tmp.reg());

    // Check that the receiver is not a smi.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Check that the receiver is a JSArray.
    __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
    deferred->Branch(not_equal);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ cmp(key.reg(),
           FieldOperand(receiver.reg(), JSArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Get the elements array from the receiver and check that it is not a
    // dictionary.
    __ mov(tmp.reg(),
           FieldOperand(receiver.reg(), JSArray::kElementsOffset));

    // Check whether it is possible to omit the write barrier. If the elements
    // array is in new space or the value written is a smi we can safely update
    // the elements array without write barrier.
    Label in_new_space;
    __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
    if (!value_is_constant) {
      __ test(result.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    }
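    // Smis contain no pointers, and a store into a new-space object can
    // never create an old-to-new reference, so neither case needs to be
    // recorded by the write barrier.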

    __ bind(&in_new_space);
    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison. When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    deferred->Branch(not_equal);

    // Store the value.
    __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
    __ IncrementCounter(&Counters::keyed_store_inline, 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC();
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 3);
  return result;
}


#undef __
#define __ ACCESS_MASM(masm)


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>::cast(raw_name->handle());
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) cgen_->frame()->Dup();
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
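      // The frame holds [receiver, key]; pushing the element at depth 1
      // twice duplicates the pair, so the reference survives the load.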
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::TakeValue() {
  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot. For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can
  // reach here. Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


#undef __

#define __ masm.


static void MemCopyWrapper(void* dest, const void* src, size_t size) {
  memcpy(dest, src, size);
}


MemCopyFunction CreateMemCopyFunction() {
  HandleScope scope;
  MacroAssembler masm(NULL, 1 * KB);

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: Return address.

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  int stack_offset = 0;  // Update if we change the stack height.

  if (FLAG_debug_code) {
    __ cmp(Operand(esp, kSizeOffset + stack_offset),
           Immediate(kMinComplexMemCopy));
    Label ok;
    __ j(greater_equal, &ok);
    __ int3();
    __ bind(&ok);
  }
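  // Callers are expected to route copies shorter than kMinComplexMemCopy
  // to plain memcpy; in debug mode we trap if that contract is violated.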
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope enable(SSE2);
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(Operand(edx), Immediate(16));
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
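    // The unaligned movdqu above already copied the first 16 bytes;
    // edx = 16 - (dst & 15), so advancing dst and src by edx (and reducing
    // count) leaves dst 16-byte aligned without skipping uncopied data.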

    // edi is now aligned. Check if esi is also aligned.
    Label unaligned_source;
    __ test(Operand(src), Immediate(0x0F));
    __ j(not_zero, &unaligned_source);
    {
      __ IncrementCounter(&Counters::memcopy_aligned, 1);
      // Copy loop for aligned source and destination.
      __ mov(edx, count);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqa(xmm0, Operand(src, 0x00));
        __ movdqa(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqa(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0xF);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
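      // This tail copy deliberately overlaps bytes the loop above already
      // moved: it reads the last 16 bytes of the source, which is in bounds
      // because the total size is at least kMinComplexMemCopy.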

      __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }
    __ Align(16);
    {
      // Copy loop for unaligned source and aligned destination.
      // If source is not aligned, we can't read it as efficiently.
      __ bind(&unaligned_source);
      __ IncrementCounter(&Counters::memcopy_unaligned, 1);
      __ mov(edx, ecx);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqu(xmm0, Operand(src, 0x00));
        __ movdqu(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqu(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0x0F);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);

      __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }

  } else {
    __ IncrementCounter(&Counters::memcopy_noxmm, 1);
    // SSE2 not supported. Unlikely to happen in practice.
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    __ cld();
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    // Copy the first word.
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);

    // Increment src and dst so that dst is aligned.
    __ mov(edx, dst);
    __ and_(edx, 0x03);
    __ neg(edx);
    __ add(Operand(edx), Immediate(4));  // edx = 4 - (dst & 3)
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
    // edi is now aligned, ecx holds the number of remaining bytes to copy.

    __ mov(edx, count);
    count = edx;
    __ shr(ecx, 2);  // Make word count instead of byte count.
    __ rep_movs();

    // At most 3 bytes left to copy. Copy 4 bytes at end of string.
    __ and_(count, 3);
    __ mov(eax, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, count, times_1, -4), eax);
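    // Same overlap trick as the SSE2 paths: the final word is copied ending
    // exactly at the end of the buffer, re-copying bytes that rep_movs
    // already moved.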

    __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
    __ pop(esi);
    __ pop(edi);
    __ ret(0);
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  ASSERT(desc.reloc_size == 0);

  // Copy the generated code into an executable chunk and return a pointer
  // to the first instruction in it as a C++ function pointer.
  LargeObjectChunk* chunk = LargeObjectChunk::New(desc.instr_size, EXECUTABLE);
  if (chunk == NULL) return &MemCopyWrapper;
  memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size);
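  // On ia32 the instruction cache is coherent with data writes, so the
  // flush below is effectively a no-op; it is kept for consistency with
  // ports that do need it.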
  CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size);
  return FUNCTION_CAST<MemCopyFunction>(chunk->GetStartAddress());
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32