// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "codegen-inl.h"
#include "bootstrapper.h"
#include "code-stubs.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

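// The '__' macro is shorthand for emitting through a macro assembler:
// ACCESS_MASM expands it to calls on 'masm' here, and on the code
// generator's own 'masm_' after the redefinition below.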
#define __ ACCESS_MASM(masm)

// -------------------------------------------------------------------------
// Platform-specific FrameRegisterState functions.

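// Each registers_[i] entry encodes what Save/Restore must do for register i:
// kPush means save it on the stack, kIgnore means leave it alone, and any
// other value is an ebp-relative frame offset, with the kSyncedFlag bit set
// when the frame copy is already up to date and no store is needed.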
void FrameRegisterState::Save(MacroAssembler* masm) const {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void FrameRegisterState::Restore(MacroAssembler* masm) const {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


#undef __
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  frame_state_.Save(masm_);
}


void DeferredCode::RestoreRegisters() {
  frame_state_.Restore(masm_);
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->Save(masm);
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  frame_state_->Restore(masm);
}


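// Stubs call the runtime outside any virtual frame, so an internal frame
// is entered around the call to keep the stack walkable, e.g. for the GC.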
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false),
      jit_cookie_((FLAG_mask_constants_with_cookie) ?
                  V8::RandomPrivate(Isolate::Current()) : 0) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  masm()->isolate()->set_jump_target_compiling_deferred_code(false);

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ int3();
    }
#endif

    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();

    // Allocate the local context if needed.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and esi agree.
      if (FLAG_debug_code) {
        __ cmp(context.reg(), Operand(esi));
        __ Assert(equal, "Runtime::NewContext should end up in esi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope()->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ mov(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object.  This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->Push(FACTORY->the_hole_value());
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }


    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements).  In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state.  Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = info->isolate()->bootstrapper()->IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body.  In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(FACTORY->undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence.  This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->is_in_loop() ? 1 : 0);
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    info->isolate()->set_jump_target_compiling_deferred_code(true);
    ProcessDeferred();
    info->isolate()->set_jump_target_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // do not overwrite context register
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now.  Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context.  However it is safe to dereference be-
      // cause the function context of a function context is itself.  Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}


Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame.  If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}


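// Convert an untagged int32 result in place into a Smi. Adding the value to
// itself is a left shift by one, which is exactly Smi tagging (the Smi tag
// is the low bit and is zero). Constant values must already be Smis.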
void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    __ add(value->reg(), Operand(value->reg()));
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}


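// Convert an untagged int32 result in place into a number: a Smi if the
// tagging add does not overflow, otherwise a freshly allocated heap number.
// On overflow the original value is recovered, converted to a double (via
// SSE2 if available, the x87 FPU otherwise), and boxed; allocation failure
// jumps to the unsafe bailout.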
void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number disagree.
    __ xor_(val, 0x80000000u);
    if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU.
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
      // Pop the value from the floating point stack.
      __ fstp(0);
    }
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}


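// Emit code to load the value of an expression to the top of the virtual
// frame. Side-effect-free expressions dominated by bit operations may
// instead be compiled in safe int32 mode, with an untagged fast path and a
// bailout that recompiles them with that mode disabled.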
void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int computation,
  // compile that SafeInt32 path, and a bailout path.
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;
    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(FACTORY->false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(FACTORY->true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(FACTORY->true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(FACTORY->false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (e.g., the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(FACTORY->true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(FACTORY->false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


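// Push the global object on the virtual frame.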
void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObjectOperand());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObjectOperand());
    frame_->Push(&temp);
  }
}


// Push the global receiver object (the receiver used for calls to global
// functions) on the virtual frame.
void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObjectOperand());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


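// Decide how the arguments object is allocated: not at all when the
// function never mentions 'arguments', eagerly on entry when the context
// is heap-allocated or in strict mode, and lazily on first use otherwise.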
ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;

  // In strict mode there is no need for shadow arguments.
  ASSERT(scope()->arguments_shadow() != NULL || scope()->is_strict_mode());

  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0 || scope()->is_strict_mode())
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


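// Store the arguments object into its variable (and shadow variable, when
// present). Under lazy allocation on function entry only the arguments
// marker sentinel is stored; the real arguments object is only built by the
// ArgumentsAccessStub when it is first needed.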
Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the arguments marker
    // value as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(FACTORY->arguments_marker());
  } else {
    ArgumentsAccessStub stub(is_strict_mode()
                             ? ArgumentsAccessStub::NEW_STRICT
                             : ArgumentsAccessStub::NEW_NON_STRICT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();

  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT((shadow != NULL && shadow->AsSlot() != NULL) ||
         scope()->is_strict_mode());

  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to.  This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsArgumentsMarker();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  if (shadow != NULL) {
    StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
  }
  return frame_->Pop();
}

// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code.  Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it.  Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it.  Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


// ECMA-262, section 9.2, page 30: ToBoolean().  Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_integer32()) {  // Also takes Smi case.
    Comment cmnt(masm_, "ONLY_INTEGER_32");
    if (FLAG_debug_code) {
      Label ok;
      __ AbortIfNotNumber(value.reg());
      __ test(value.reg(), Immediate(kSmiTagMask));
      __ j(zero, &ok);
      __ fldz();
      __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
      __ FCmp();
      __ j(not_zero, &ok);
      __ Abort("Smi was wrapped in HeapNumber in output from bitop");
      __ bind(&ok);
    }
    // In the integer32 case there are no Smis hidden in heap numbers, so we
    // need only test for Smi zero.
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    value.Unuse();
    dest->Split(not_zero);
  } else if (value.is_number()) {
    Comment cmnt(masm_, "ONLY_NUMBER");
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);
    __ fldz();
    __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
    __ FCmp();
    value.Unuse();
    dest->Split(not_zero);
  } else {
    // Fast case checks.
    // 'false' => false.
    __ cmp(value.reg(), FACTORY->false_value());
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ cmp(value.reg(), FACTORY->true_value());
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ cmp(value.reg(), FACTORY->undefined_value());
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ test(temp.reg(), Operand(temp.reg()));
    temp.Unuse();
    dest->Split(not_equal);
  }
}


// Perform or call the specialized stub for a binary operation.  Requires the
// three registers left, right and dst to be distinct and spilled.  This
// deferred operation has up to three entry points:  The main one calls the
// runtime system.  The second is for when the result is a non-Smi.  The
// third is for when at least one of the inputs is non-Smi and we have SSE2.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
    ASSERT(!left.is(right));
  }

  virtual void Generate();

  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit().
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToAnswerOutOfRange(Condition cond);
  void JumpToConstantRhs(Condition cond, Smi* smi_value);
  Label* NonSmiInputLabel();

 private:
  void GenerateAnswerOutOfRange();
  void GenerateNonSmiInput();

  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
  Label answer_out_of_range_;
  Label non_smi_input_;
  Label constant_rhs_;
  Smi* smi_value_;
};


Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
  if (Token::IsBitOp(op_) &&
      masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
    return &non_smi_input_;
  } else {
    return entry_label();
  }
}


void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) {
  __ j(cond, &answer_out_of_range_);
}


void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
                                                      Smi* smi_value) {
  smi_value_ = smi_value;
  __ j(cond, &constant_rhs_);
}


void DeferredInlineBinaryOperation::Generate() {
  // Registers are not saved implicitly for this stub, so we should not
  // tread on the registers that were not passed to us.
  if (masm()->isolate()->cpu_features()->IsSupported(SSE2) &&
      ((op_ == Token::ADD) ||
       (op_ == Token::SUB) ||
       (op_ == Token::MUL) ||
       (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               FACTORY->heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               FACTORY->heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    Exit();


    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  // Register spilling is not done implicitly for this stub.
  // We can't postpone it any more now though.
  SaveRegisters();

  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  RestoreRegisters();
  Exit();

  if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) {
    GenerateNonSmiInput();
  }
  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


void DeferredInlineBinaryOperation::GenerateNonSmiInput() {
  // We know at least one of the inputs was not a Smi.
  // This is a third entry point into the deferred code.
  // We may not overwrite left_ because we want to be able
  // to call the handling code for non-smi answer and it
  // might want to overwrite the heap number in left_.
  ASSERT(!right_.is(dst_));
  ASSERT(!left_.is(dst_));
  ASSERT(!left_.is(right_));
  // This entry point is used for bit ops where the right hand side
  // is a constant Smi and the left hand side is a heap object.  It
  // is also used for bit ops where both sides are unknown, but where
  // at least one of them is a heap object.
  bool rhs_is_constant = constant_rhs_.is_linked();
  // We can't generate code for both cases.
  ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked());

  if (FLAG_debug_code) {
    __ int3();  // We don't fall through into this code.
  }

  __ bind(&non_smi_input_);

  if (rhs_is_constant) {
    __ bind(&constant_rhs_);
    // In this case the input is a heap object and it is in the dst_ register.
    // The left_ and right_ registers have not been initialized yet.
    __ mov(right_, Immediate(smi_value_));
    __ mov(left_, Operand(dst_));
    if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
      __ jmp(entry_label());
      return;
    } else {
      CpuFeatures::Scope use_sse2(SSE2);
      __ JumpIfNotNumber(dst_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    }
  } else {
    // We know we have SSE2 here because otherwise the label is not linked (see
    // NonSmiInputLabel).
    CpuFeatures::Scope use_sse2(SSE2);
    // Handle the non-constant right hand side situation:
    if (left_info_.IsSmi()) {
      // Right is a heap object.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, dst_, right_info_, entry_label());
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_);
    } else if (right_info_.IsSmi()) {
      // Left is a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    } else {
      // Here we don't know if it's one or both that is a heap object.
      Label only_right_is_heap_object, got_both;
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_, &only_right_is_heap_object);
      // Left was a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_, &got_both);
      // Both were heap objects.
      __ rcl(right_, 1);  // Put tag back.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ jmp(&got_both);
      __ bind(&only_right_is_heap_object);
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ bind(&got_both);
    }
  }
  ASSERT(op_ == Token::BIT_AND ||
         op_ == Token::BIT_OR ||
         op_ == Token::BIT_XOR ||
         right_.is(ecx));
  switch (op_) {
    case Token::BIT_AND: __ and_(dst_, Operand(right_)); break;
    case Token::BIT_OR:  __ or_(dst_, Operand(right_)); break;
    case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break;
    case Token::SHR: __ shr_cl(dst_); break;
    case Token::SAR: __ sar_cl(dst_); break;
    case Token::SHL: __ shl_cl(dst_); break;
    default: UNREACHABLE();
  }
  if (op_ == Token::SHR) {
    // Check that the *unsigned* result fits in a smi.  Neither of
    // the two high-order bits can be set:
    //  * 0x80000000: high bit would be lost when smi tagging.
    //  * 0x40000000: this number would convert to negative when smi
    //    tagging.
    __ test(dst_, Immediate(0xc0000000));
    __ j(not_zero, &answer_out_of_range_);
  } else {
    // Check that the *signed* result fits in a smi.
    __ cmp(dst_, 0xc0000000);
    __ j(negative, &answer_out_of_range_);
  }
  __ SmiTag(dst_);
  Exit();
}


1233void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() {
1234 Label after_alloc_failure2;
1235 Label allocation_ok;
1236 __ bind(&after_alloc_failure2);
1237 // We have to allocate a number, causing a GC, while keeping hold of
1238 // the answer in dst_. The answer is not a Smi. We can't just call the
1239 // runtime shift function here because we already threw away the inputs.
1240 __ xor_(left_, Operand(left_));
1241 __ shl(dst_, 1); // Put top bit in carry flag and Smi tag the low bits.
1242 __ rcr(left_, 1); // Rotate with carry.
1243 __ push(dst_); // Smi tagged low 31 bits.
1244 __ push(left_); // 0 or 0x80000000, which is Smi tagged in both cases.
1245 __ CallRuntime(Runtime::kNumberAlloc, 0);
1246 if (!left_.is(eax)) {
1247 __ mov(left_, eax);
1248 }
1249 __ pop(right_); // High bit.
1250 __ pop(dst_); // Low 31 bits.
1251 __ shr(dst_, 1); // Put 0 in top bit.
1252 __ or_(dst_, Operand(right_));
1253 __ jmp(&allocation_ok);
1254
1255 // This is the second entry point to the deferred code. It is used only by
1256 // the bit operations.
1257 // The dst_ register has the answer. It is not Smi tagged. If mode_ is
1258 // OVERWRITE_LEFT then left_ must contain either an overwritable heap number
1259 // or a Smi.
1260 // Put a heap number pointer in left_.
1261 __ bind(&answer_out_of_range_);
1262 SaveRegisters();
1263 if (mode_ == OVERWRITE_LEFT) {
1264 __ test(left_, Immediate(kSmiTagMask));
1265 __ j(not_zero, &allocation_ok);
1266 }
1267 // This trashes right_.
1268 __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
1269 __ bind(&allocation_ok);
Steve Block44f0eee2011-05-26 01:26:41 +01001270 if (masm()->isolate()->cpu_features()->IsSupported(SSE2) &&
1271 op_ != Token::SHR) {
Iain Merrick75681382010-08-19 15:07:18 +01001272 CpuFeatures::Scope use_sse2(SSE2);
1273 ASSERT(Token::IsBitOp(op_));
1274 // Signed conversion.
1275 __ cvtsi2sd(xmm0, Operand(dst_));
1276 __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0);
1277 } else {
1278 if (op_ == Token::SHR) {
1279 __ push(Immediate(0)); // High word of unsigned value.
1280 __ push(dst_);
1281 __ fild_d(Operand(esp, 0));
1282 __ Drop(2);
1283 } else {
1284 ASSERT(Token::IsBitOp(op_));
1285 __ push(dst_);
1286 __ fild_s(Operand(esp, 0)); // Signed conversion.
1287 __ pop(dst_);
1288 }
1289 __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset));
1290 }
1291 __ mov(dst_, left_);
1292 RestoreRegisters();
1293 Exit();
Steve Blocka7e24c12009-10-30 11:49:00 +00001294}
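

// Illustrative sketch, not part of the original file: the shl/rcr dance above
// keeps a raw 32-bit answer alive across a GC by splitting it into two words
// that both look like smis (tag bit 0).  In scalar form, with hypothetical
// helper names:
static inline void SplitUntaggedAnswerForGC(uint32_t answer,
                                            uint32_t* low_31_bits_tagged,
                                            uint32_t* high_bit) {
  *low_31_bits_tagged = answer << 1;   // shl dst_, 1: smi-tags the low 31 bits.
  *high_bit = answer & 0x80000000u;    // rcr left_, 1: 0 or 0x80000000, tag 0.
}

static inline uint32_t ReassembleUntaggedAnswer(uint32_t low_31_bits_tagged,
                                                uint32_t high_bit) {
  // Mirrors the pop/shr/or_ sequence after the runtime call.
  return (low_31_bits_tagged >> 1) | high_bit;
}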


static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
                                  Token::Value op,
                                  const Result& right,
                                  const Result& left) {
  // Set TypeInfo of result according to the operation performed.
  // Rely on the fact that smis have a 31 bit payload on ia32.
  STATIC_ASSERT(kSmiValueSize == 31);
  switch (op) {
    case Token::COMMA:
      return right.type_info();
    case Token::OR:
    case Token::AND:
      // Result type can be either of the two input types.
      return operands_type;
    case Token::BIT_AND: {
      // Anding with positive Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() >= 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
          Smi::cast(*left.handle())->value() >= 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_OR: {
      // Oring with negative Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() < 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
          Smi::cast(*left.handle())->value() < 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_XOR:
      // Result is always a 32-bit integer.  The Smi property of the inputs
      // is preserved.
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SAR:
      if (left.is_smi()) return TypeInfo::Smi();
      // Result is a smi if we shift by a constant >= 1, otherwise an integer32.
      // Shift amount is masked with 0x1F (ECMA standard 11.7.2).
      return (right.is_constant() && right.handle()->IsSmi()
              && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SHR:
      // Result is a smi if we shift by a constant >= 2, an integer32 if
      // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
      if (right.is_constant() && right.handle()->IsSmi()) {
        int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
        if (shift_amount > 1) {
          return TypeInfo::Smi();
        } else if (shift_amount > 0) {
          return TypeInfo::Integer32();
        }
      }
      return TypeInfo::Number();
    case Token::ADD:
      if (operands_type.IsSmi()) {
        // The Integer32 range is big enough to take the sum of any two Smis.
        return TypeInfo::Integer32();
      } else if (operands_type.IsNumber()) {
        return TypeInfo::Number();
      } else if (left.type_info().IsString() || right.type_info().IsString()) {
        return TypeInfo::String();
      } else {
        return TypeInfo::Unknown();
      }
    case Token::SHL:
      return TypeInfo::Integer32();
    case Token::SUB:
      // The Integer32 range is big enough to take the difference of any two
      // Smis.
      return (operands_type.IsSmi()) ?
             TypeInfo::Integer32() :
             TypeInfo::Number();
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      // Result is always a number.
      return TypeInfo::Number();
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return TypeInfo::Unknown();
}
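

// Illustrative sketch, not part of the original file: the constant-operand
// rules above rest on two bit-level facts.  AND with a non-negative constant
// can only clear bits, so the result stays in [0, c]; OR with a negative
// constant forces the sign bit, so the result stays in [c, -1].  Both ranges
// fit the 31-bit smi payload when c does.  A hypothetical checker:
static inline bool MaskedResultStaysInSmiRange(int32_t x, int32_t c) {
  if (c >= 0) {
    int32_t r = x & c;
    return r >= 0 && r <= c;   // Holds for every x.
  } else {
    int32_t r = x | c;
    return r >= c && r <= -1;  // Holds for every x.
  }
}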


void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          StringAddStub stub(NO_STRING_CHECK_LEFT_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        }
      } else if (right_is_string) {
        StringAddStub stub(NO_STRING_CHECK_RIGHT_IN_STUB);
        answer = frame_->CallStub(&stub, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}


Result CodeGenerator::GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub,
                                                      Result* left,
                                                      Result* right) {
  if (stub->ArgsInRegistersSupported()) {
    stub->SetArgsInRegisters();
    return frame_->CallStub(stub, left, right);
  } else {
    frame_->Push(left);
    frame_->Push(right);
    return frame_->CallStub(stub, 2);
  }
}


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = HEAP->undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
        if (answer != 0 || (left >= 0 && right >= 0)) {
          answer_object = Smi::FromInt(static_cast<int>(answer));
        }
      }
    }
      break;
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
      int shift_amount = right & 0x1F;
      if (Smi::IsValid(left << shift_amount)) {
        answer_object = Smi::FromInt(left << shift_amount);
      }
      break;
    }
    case Token::SHR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      unsigned_left >>= shift_amount;
      if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
        answer_object = Smi::FromInt(unsigned_left);
      }
      break;
    }
    case Token::SAR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      if (left < 0) {
        // Perform an arithmetic shift of a negative number by
        // complementing the number, shifting logically, and
        // complementing again.
        unsigned_left = ~unsigned_left;
        unsigned_left >>= shift_amount;
        unsigned_left = ~unsigned_left;
      } else {
        unsigned_left >>= shift_amount;
      }
      ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
      answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object->IsUndefined()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}
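

// Illustrative sketch, not part of the original file: the SAR folding above
// avoids implementation-defined right shifts of negative numbers by
// complementing, shifting logically, and complementing back.  ~x >> n fills
// with zeros, and the second complement restores the ones that sign
// extension requires.  The helper name is hypothetical.
static inline int32_t ArithmeticShiftRight(int32_t value, int shift_amount) {
  unsigned int bits = static_cast<unsigned int>(value);
  if (value < 0) {
    bits = ~bits;            // Non-negative mirror image of the value.
    bits >>= shift_amount;   // Well-defined logical shift.
    bits = ~bits;            // Restore the sign-extension ones.
  } else {
    bits >>= shift_amount;
  }
  return static_cast<int32_t>(bits);
}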


void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
                                               Result* right,
                                               JumpTarget* both_smi) {
  TypeInfo left_info = left->type_info();
  TypeInfo right_info = right->type_info();
  if (left_info.IsDouble() || left_info.IsString() ||
      right_info.IsDouble() || right_info.IsString()) {
    // We know that left and right are not both smi.  Don't do any tests.
    return;
  }

  if (left->reg().is(right->reg())) {
    if (!left_info.IsSmi()) {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), left->reg());
      __ or_(temp.reg(), Operand(right->reg()));
      __ test(temp.reg(), Immediate(kSmiTagMask));
      temp.Unuse();
      both_smi->Branch(zero);
    } else {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    if (!right_info.IsSmi()) {
      __ test(right->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  }
}
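

// Illustrative sketch, not part of the original file: the two-operand smi
// test above relies on the smi tag being the low bit and zero for smis, so
// OR-ing the operands and testing the tag bit once answers "are both smis?"
// with a single branch.  The helper is hypothetical; kSmiTagMask is the real
// V8 constant (1 on ia32).
static inline bool AreBothSmi(int32_t a, int32_t b) {
  return ((a | b) & kSmiTagMask) == 0;
}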


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  DeferredCode* deferred) {
  JumpIfNotBothSmiUsingTypeInfo(left,
                                right,
                                scratch,
                                left_info,
                                right_info,
                                deferred->entry_label());
}


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  Label* on_not_smi) {
  if (left.is(right)) {
    if (!left_info.IsSmi()) {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left);
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      __ mov(scratch, left);
      __ or_(scratch, Operand(right));
      __ test(scratch, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left);
    if (!right_info.IsSmi()) {
      __ test(right, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  }
}


// Implements a binary operation using a deferred code object and some
// inline code to operate on smis quickly.
Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
                                               Result* left,
                                               Result* right,
                                               OverwriteMode overwrite_mode) {
  // Copy the type info because left and right may be overwritten.
  TypeInfo left_type_info = left->type_info();
  TypeInfo right_type_info = right->type_info();
  Token::Value op = expr->op();
  Result answer;
  // Special handling of div and mod because they use fixed registers.
  if (op == Token::DIV || op == Token::MOD) {
    // We need eax as the quotient register, edx as the remainder
    // register, neither left nor right in eax or edx, and left copied
    // to eax.
    Result quotient;
    Result remainder;
    bool left_is_in_eax = false;
    // Step 1: get eax for quotient.
    if ((left->is_register() && left->reg().is(eax)) ||
        (right->is_register() && right->reg().is(eax))) {
      // One or both is in eax.  Use a fresh non-edx register for
      // them.
      Result fresh = allocator_->Allocate();
      ASSERT(fresh.is_valid());
      if (fresh.reg().is(edx)) {
        remainder = fresh;
        fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
      }
      if (left->is_register() && left->reg().is(eax)) {
        quotient = *left;
        *left = fresh;
        left_is_in_eax = true;
      }
      if (right->is_register() && right->reg().is(eax)) {
        quotient = *right;
        *right = fresh;
      }
      __ mov(fresh.reg(), eax);
    } else {
      // Neither left nor right is in eax.
      quotient = allocator_->Allocate(eax);
    }
    ASSERT(quotient.is_register() && quotient.reg().is(eax));
    ASSERT(!(left->is_register() && left->reg().is(eax)));
    ASSERT(!(right->is_register() && right->reg().is(eax)));

    // Step 2: get edx for remainder if necessary.
    if (!remainder.is_valid()) {
      if ((left->is_register() && left->reg().is(edx)) ||
          (right->is_register() && right->reg().is(edx))) {
        Result fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
        if (left->is_register() && left->reg().is(edx)) {
          remainder = *left;
          *left = fresh;
        }
        if (right->is_register() && right->reg().is(edx)) {
          remainder = *right;
          *right = fresh;
        }
        __ mov(fresh.reg(), edx);
      } else {
        // Neither left nor right is in edx.
        remainder = allocator_->Allocate(edx);
      }
    }
    ASSERT(remainder.is_register() && remainder.reg().is(edx));
    ASSERT(!(left->is_register() && left->reg().is(edx)));
    ASSERT(!(right->is_register() && right->reg().is(edx)));

    left->ToRegister();
    right->ToRegister();
    frame_->Spill(eax);
    frame_->Spill(edx);
    // DeferredInlineBinaryOperation requires all the registers that it is
    // told about to be spilled and distinct.
    Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);

    // Check that left and right are smi tagged.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          (op == Token::DIV) ? eax : edx,
                                          left->reg(),
                                          distinct_right.reg(),
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), edx,
                                  left_type_info, right_type_info, deferred);
    if (!left_is_in_eax) {
      __ mov(eax, left->reg());
    }
    // Sign extend eax into edx:eax.
    __ cdq();
    // Check for 0 divisor.
    __ test(right->reg(), Operand(right->reg()));
    deferred->Branch(zero);
    // Divide edx:eax by the right operand.
    __ idiv(right->reg());

    // Complete the operation.
    if (op == Token::DIV) {
      // Check for negative zero result.  If result is zero, and divisor
      // is negative, return a floating point negative zero.  The
      // virtual frame is unchanged in this block, so local control flow
      // can use a Label rather than a JumpTarget.  If the context of this
      // expression will treat -0 like 0, do not do this test.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(left->reg(), Operand(left->reg()));
        __ j(not_zero, &non_zero_result);
        __ test(right->reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by
      // idiv instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      deferred->Branch(equal);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      deferred->Branch(not_zero);
      // Tag the result and store it in the quotient register.
      __ SmiTag(eax);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = quotient;
    } else {
      ASSERT(op == Token::MOD);
      // Check for a negative zero result.  If the result is zero, and
      // the dividend is negative, return a floating point negative
      // zero.  The frame is unchanged in this block, so local control
      // flow can use a Label rather than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(edx, Operand(edx));
        __ j(not_zero, &non_zero_result, taken);
        __ test(left->reg(), Operand(left->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = remainder;
    }
    ASSERT(answer.is_valid());
    return answer;
  }

  // Special handling of shift operations because they use fixed
  // registers.
  if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
    // Move left out of ecx if necessary.
    if (left->is_register() && left->reg().is(ecx)) {
      *left = allocator_->Allocate();
      ASSERT(left->is_valid());
      __ mov(left->reg(), ecx);
    }
    right->ToRegister(ecx);
    left->ToRegister();
    ASSERT(left->is_register() && !left->reg().is(ecx));
    ASSERT(right->is_register() && right->reg().is(ecx));
    if (left_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    }
    if (right_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
    }

    // We will modify right, it must be spilled.
    frame_->Spill(ecx);
    // DeferredInlineBinaryOperation requires all the registers that it is told
    // about to be spilled and distinct.  We know that right is ecx and left is
    // not ecx.
    frame_->Spill(left->reg());

    // Use a fresh answer register to avoid spilling the left operand.
    answer = allocator_->Allocate();
    ASSERT(answer.is_valid());

    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          answer.reg(),
                                          left->reg(),
                                          ecx,
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());

    // Untag both operands.
    __ mov(answer.reg(), left->reg());
    __ SmiUntag(answer.reg());
    __ SmiUntag(right->reg());  // Right is ecx.

    // Perform the operation.
    ASSERT(right->reg().is(ecx));
    switch (op) {
      case Token::SAR: {
        __ sar_cl(answer.reg());
        if (!left_type_info.IsSmi()) {
          // Check that the *signed* result fits in a smi.
          __ cmp(answer.reg(), 0xc0000000);
          deferred->JumpToAnswerOutOfRange(negative);
        }
        break;
      }
      case Token::SHR: {
        __ shr_cl(answer.reg());
        // Check that the *unsigned* result fits in a smi.  Neither of
        // the two high-order bits can be set:
        //  * 0x80000000: high bit would be lost when smi tagging.
        //  * 0x40000000: this number would convert to negative when smi
        //    tagging.
        // These two cases can only happen with shifts by 0 or 1 when
        // handed a valid smi.  If the answer cannot be represented by a
        // smi, restore the left and right arguments, and jump to slow
        // case.  The low bit of the left argument may be lost, but only
        // in a case where it is dropped anyway.
        __ test(answer.reg(), Immediate(0xc0000000));
        deferred->JumpToAnswerOutOfRange(not_zero);
        break;
      }
      case Token::SHL: {
        __ shl_cl(answer.reg());
        // Check that the *signed* result fits in a smi.
        __ cmp(answer.reg(), 0xc0000000);
        deferred->JumpToAnswerOutOfRange(negative);
        break;
      }
      default:
        UNREACHABLE();
    }
    // Smi-tag the result in answer.
    __ SmiTag(answer.reg());
    deferred->BindExit();
    left->Unuse();
    right->Unuse();
    ASSERT(answer.is_valid());
    return answer;
  }

  // Handle the other binary operations.
  left->ToRegister();
  right->ToRegister();
  // DeferredInlineBinaryOperation requires all the registers that it is told
  // about to be spilled.
  Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);
  // A newly allocated register answer is used to hold the answer.  The
  // registers containing left and right are not modified so they don't
  // need to be spilled in the fast case.
  answer = allocator_->Allocate();
  ASSERT(answer.is_valid());

  // Perform the smi tag check.
  DeferredInlineBinaryOperation* deferred =
      new DeferredInlineBinaryOperation(op,
                                        answer.reg(),
                                        left->reg(),
                                        distinct_right.reg(),
                                        left_type_info,
                                        right_type_info,
                                        overwrite_mode);
  Label non_smi_bit_op;
  if (op != Token::BIT_OR) {
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());
  }

  __ mov(answer.reg(), left->reg());
  switch (op) {
    case Token::ADD:
      __ add(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::SUB:
      __ sub(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::MUL: {
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // Remove smi tag from the left operand (but keep sign).
      // Left-hand operand has been copied into answer.
      __ SmiUntag(answer.reg());
      // Do multiplication of smis, leaving result in answer.
      __ imul(answer.reg(), Operand(right->reg()));
      // Go slow on overflows.
      deferred->Branch(overflow);
      // Check for negative zero result.  If product is zero, and one
      // argument is negative, go to slow case.  The frame is unchanged
      // in this block, so local control flow can use a Label rather
      // than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(answer.reg(), Operand(answer.reg()));
        __ j(not_zero, &non_zero_result, taken);
        __ mov(answer.reg(), left->reg());
        __ or_(answer.reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ xor_(answer.reg(), Operand(answer.reg()));  // Positive 0 is correct.
        __ bind(&non_zero_result);
      }
      break;
    }

    case Token::BIT_OR:
      __ or_(answer.reg(), Operand(right->reg()));
      __ test(answer.reg(), Immediate(kSmiTagMask));
      __ j(not_zero, deferred->NonSmiInputLabel());
      break;

    case Token::BIT_AND:
      __ and_(answer.reg(), Operand(right->reg()));
      break;

    case Token::BIT_XOR:
      __ xor_(answer.reg(), Operand(right->reg()));
      break;

    default:
      UNREACHABLE();
      break;
  }

  deferred->BindExit();
  left->Unuse();
  right->Unuse();
  ASSERT(answer.is_valid());
  return answer;
}
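

// Illustrative sketch, not part of the original file: the DIV corner cases
// the inline code above bails out on, expressed as one scalar predicate.
// The slow path is taken for a zero divisor, for a result that would be the
// double -0 (zero dividend, negative divisor), for the quotient 2^30 (most
// negative smi divided by -1, which does not fit the 31-bit payload), and
// for any non-integer quotient.  In the generated code the zero-divisor test
// runs before idiv; here it is merely one clause of a hypothetical helper.
static inline bool SmiDivisionNeedsSlowPath(int32_t left, int32_t right,
                                            int32_t quotient,
                                            int32_t remainder) {
  if (right == 0) return true;              // Division by zero.
  if (left == 0 && right < 0) return true;  // Result is -0, not a smi.
  if (quotient == 0x40000000) return true;  // -2^30 / -1 overflows smi range.
  if (remainder != 0) return true;          // Non-integer quotient.
  return false;
}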


// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             TypeInfo type_info,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperation::Generate() {
  // For mod we don't generate all the Smi code inline.
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, src_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// Call the appropriate binary operation stub to compute value op src
// and leave the result in dst.
class DeferredInlineSmiOperationReversed: public DeferredCode {
 public:
  DeferredInlineSmiOperationReversed(Token::Value op,
                                     Register dst,
                                     Smi* value,
                                     Register src,
                                     TypeInfo type_info,
                                     OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        type_info_(type_info),
        value_(value),
        src_(src),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperationReversed");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  Register src_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperationReversed::Generate() {
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, value_, src_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src + value is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative addition and call the appropriate
// specialized stub for add.  The result is left in dst.
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAdd::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}
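

// Illustrative sketch, not part of the original file: the "optimistic add"
// protocol around DeferredInlineSmiAdd.  The inline code adds first and only
// then checks the overflow and tag bits; the deferred path must therefore
// subtract the constant back out before calling the generic stub, which is
// exactly what the __ sub(...) above does.  The helper is hypothetical and
// uses the GCC/Clang __builtin_add_overflow intrinsic for the wrapped sum.
static inline bool OptimisticSmiAdd(int32_t* operand_tagged,
                                    int32_t constant_tagged) {
  int32_t sum;
  bool overflowed = __builtin_add_overflow(*operand_tagged, constant_tagged,
                                           &sum);
  bool was_smi = (*operand_tagged & kSmiTagMask) == 0;
  if (overflowed || !was_smi) {
    return false;  // Deferred path: reconstruct the operand as sum - constant.
  }
  *operand_tagged = sum;
  return true;
}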


// The result of value + src is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative addition and call the appropriate
// specialized stub for add.  The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               TypeInfo type_info,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAddReversed::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, value_, dst_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src - value is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative subtraction and call the
// appropriate specialized stub for subtract.  The result is left in
// dst.
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiSub::Generate() {
  // Undo the optimistic sub operation and call the shared stub.
  __ add(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::SUB,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
                                                 Result* operand,
                                                 Handle<Object> value,
                                                 bool reversed,
                                                 OverwriteMode overwrite_mode) {
  // Generate inline code for a binary operation when one of the
  // operands is a constant smi.  Consumes the argument "operand".
  if (IsUnsafeSmi(value)) {
    Result unsafe_operand(value);
    if (reversed) {
      return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
                                      overwrite_mode);
    } else {
      return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
                                      overwrite_mode);
    }
  }

  // Get the literal value.
  Smi* smi_value = Smi::cast(*value);
  int int_value = smi_value->value();

  Token::Value op = expr->op();
  Result answer;
  switch (op) {
    case Token::ADD: {
      operand->ToRegister();
      frame_->Spill(operand->reg());

      // Optimistically add.  Call the specialized add stub if the
      // result is not a smi or overflows.
      DeferredCode* deferred = NULL;
      if (reversed) {
        deferred = new DeferredInlineSmiAddReversed(operand->reg(),
                                                    operand->type_info(),
                                                    smi_value,
                                                    overwrite_mode);
      } else {
        deferred = new DeferredInlineSmiAdd(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
      }
      __ add(Operand(operand->reg()), Immediate(value));
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred = NULL;
      if (reversed) {
        // The reversed case is only hit when the right operand is not a
        // constant.
        ASSERT(operand->is_register());
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        __ Set(answer.reg(), Immediate(value));
        deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   operand->reg(),
                                                   operand->type_info(),
                                                   overwrite_mode);
        __ sub(answer.reg(), Operand(operand->reg()));
      } else {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        answer = *operand;
        deferred = new DeferredInlineSmiSub(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
        __ sub(Operand(operand->reg()), Immediate(value));
      }
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(answer.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      operand->Unuse();
      break;
    }

    case Token::SAR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        frame_->Spill(operand->reg());
        if (!operand->type_info().IsSmi()) {
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
          deferred->BindExit();
        } else {
          if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
        }
        answer = *operand;
      }
      break;

    case Token::SHR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           answer.reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        if (!operand->type_info().IsSmi()) {
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(operand->reg());
        }
        __ mov(answer.reg(), operand->reg());
        __ SmiUntag(answer.reg());
        __ shr(answer.reg(), shift_value);
        // A negative Smi shifted right by two or more is in the positive
        // Smi range.
        if (shift_value < 2) {
          __ test(answer.reg(), Immediate(0xc0000000));
          deferred->Branch(not_zero);
        }
        operand->Unuse();
        __ SmiTag(answer.reg());
        deferred->BindExit();
      }
      break;

    case Token::SHL:
      if (reversed) {
        // Move operand into ecx and also into a second register.
        // If operand is already in a register, take advantage of that.
        // This lets us modify ecx, but still bail out to deferred code.
        Result right;
        Result right_copy_in_ecx;
        TypeInfo right_type_info = operand->type_info();
        operand->ToRegister();
        if (operand->reg().is(ecx)) {
          right = allocator()->Allocate();
          __ mov(right.reg(), ecx);
          frame_->Spill(ecx);
          right_copy_in_ecx = *operand;
        } else {
          right_copy_in_ecx = allocator()->Allocate(ecx);
          __ mov(ecx, operand->reg());
          right = *operand;
        }
        operand->Unuse();

        answer = allocator()->Allocate();
        DeferredInlineSmiOperationReversed* deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   right.reg(),
                                                   right_type_info,
                                                   overwrite_mode);
        __ mov(answer.reg(), Immediate(int_value));
        __ sar(ecx, kSmiTagSize);
        if (!right_type_info.IsSmi()) {
          deferred->Branch(carry);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(right.reg());
        }
        __ shl_cl(answer.reg());
        __ cmp(answer.reg(), 0xc0000000);
        deferred->Branch(sign);
        __ SmiTag(answer.reg());

        deferred->BindExit();
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        if (shift_value == 0) {
          // Spill operand so it can be overwritten in the slow case.
          frame_->Spill(operand->reg());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          deferred->BindExit();
          answer = *operand;
        } else {
          // Use a fresh temporary for nonzero shift values.
          answer = allocator()->Allocate();
          ASSERT(answer.is_valid());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             answer.reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          if (!operand->type_info().IsSmi()) {
            __ test(operand->reg(), Immediate(kSmiTagMask));
            deferred->Branch(not_zero);
          } else if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          __ mov(answer.reg(), operand->reg());
          STATIC_ASSERT(kSmiTag == 0);  // adjust code if not the case
          // We do no shifts, only the Smi conversion, if shift_value is 1.
          if (shift_value > 1) {
            __ shl(answer.reg(), shift_value - 1);
          }
          // Convert int result to Smi, checking that it is in int range.
          STATIC_ASSERT(kSmiTagSize == 1);  // adjust code if not the case
          __ add(answer.reg(), Operand(answer.reg()));
          deferred->Branch(overflow);
          deferred->BindExit();
          operand->Unuse();
        }
      }
      break;

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      operand->ToRegister();
      // DeferredInlineBinaryOperation requires all the registers that it is
      // told about to be spilled.
      frame_->Spill(operand->reg());
      DeferredInlineBinaryOperation* deferred = NULL;
      if (!operand->type_info().IsSmi()) {
        Result left = allocator()->Allocate();
        ASSERT(left.is_valid());
        Result right = allocator()->Allocate();
        ASSERT(right.is_valid());
        deferred = new DeferredInlineBinaryOperation(
            op,
            operand->reg(),
            left.reg(),
            right.reg(),
            operand->type_info(),
            TypeInfo::Smi(),
            overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT);
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->JumpToConstantRhs(not_zero, smi_value);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      if (op == Token::BIT_AND) {
        __ and_(Operand(operand->reg()), Immediate(value));
      } else if (op == Token::BIT_XOR) {
        if (int_value != 0) {
          __ xor_(Operand(operand->reg()), Immediate(value));
        }
      } else {
        ASSERT(op == Token::BIT_OR);
        if (int_value != 0) {
          __ or_(Operand(operand->reg()), Immediate(value));
        }
      }
      if (deferred != NULL) deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::DIV:
      if (!reversed && int_value == 2) {
        operand->ToRegister();
        frame_->Spill(operand->reg());

        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check that lowest log2(value) bits of operand are zero, and test
        // smi tag at the same time.
        STATIC_ASSERT(kSmiTag == 0);
        STATIC_ASSERT(kSmiTagSize == 1);
        __ test(operand->reg(), Immediate(3));
        deferred->Branch(not_zero);  // Branch if non-smi or odd smi.
        __ sar(operand->reg(), 1);
        deferred->BindExit();
        answer = *operand;
      } else {
        // Cannot fall through MOD to default case, so we duplicate the
        // default case here.
        Result constant_operand(value);
        if (reversed) {
          answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                            overwrite_mode);
        } else {
          answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                            overwrite_mode);
        }
      }
      break;

    // Generate inline code for mod of powers of 2 and negative powers of 2.
    case Token::MOD:
      if (!reversed &&
          int_value != 0 &&
          (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check for negative or non-Smi left hand side.
        __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
        deferred->Branch(not_zero);
        if (int_value < 0) int_value = -int_value;
        if (int_value == 1) {
          __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
        } else {
          __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
        }
        deferred->BindExit();
        answer = *operand;
        break;
      }
      // Fall through if we did not find a power of 2 on the right hand side!
      // The next case must be the default.

    default: {
      Result constant_operand(value);
      if (reversed) {
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                          overwrite_mode);
      }
      break;
    }
  }
  ASSERT(answer.is_valid());
  return answer;
}
2634
2635
Leon Clarkee46be812010-01-19 14:06:41 +00002636static bool CouldBeNaN(const Result& result) {
Steve Block6ded16b2010-05-10 14:33:55 +01002637 if (result.type_info().IsSmi()) return false;
2638 if (result.type_info().IsInteger32()) return false;
Leon Clarkee46be812010-01-19 14:06:41 +00002639 if (!result.is_constant()) return true;
2640 if (!result.handle()->IsHeapNumber()) return false;
2641 return isnan(HeapNumber::cast(*result.handle())->value());
2642}
2643
2644
Steve Block6ded16b2010-05-10 14:33:55 +01002645// Convert from signed to unsigned comparison to match the way EFLAGS are set
2646// by FPU and XMM compare instructions.
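// (ucomisd and the FPU compare used here set ZF and CF the way an
// unsigned integer compare does, so below/above stand in for
// less/greater.)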
2647static Condition DoubleCondition(Condition cc) {
2648 switch (cc) {
2649 case less: return below;
2650 case equal: return equal;
2651 case less_equal: return below_equal;
2652 case greater: return above;
2653 case greater_equal: return above_equal;
2654 default: UNREACHABLE();
2655 }
2656 UNREACHABLE();
2657 return equal;
2658}
2659
2660
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002661static CompareFlags ComputeCompareFlags(NaNInformation nan_info,
2662 bool inline_number_compare) {
2663 CompareFlags flags = NO_SMI_COMPARE_IN_STUB;
2664 if (nan_info == kCantBothBeNaN) {
2665 flags = static_cast<CompareFlags>(flags | CANT_BOTH_BE_NAN);
2666 }
2667 if (inline_number_compare) {
2668 flags = static_cast<CompareFlags>(flags | NO_NUMBER_COMPARE_IN_STUB);
2669 }
2670 return flags;
2671}
2672
2673
Leon Clarkee46be812010-01-19 14:06:41 +00002674void CodeGenerator::Comparison(AstNode* node,
2675 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00002676 bool strict,
2677 ControlDestination* dest) {
2678 // Strict only makes sense for equality comparisons.
2679 ASSERT(!strict || cc == equal);
2680
2681 Result left_side;
2682 Result right_side;
2683 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
2684 if (cc == greater || cc == less_equal) {
2685 cc = ReverseCondition(cc);
2686 left_side = frame_->Pop();
2687 right_side = frame_->Pop();
2688 } else {
2689 right_side = frame_->Pop();
2690 left_side = frame_->Pop();
2691 }
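// 'a > b' is thus compiled as 'b < a', so only less, equal, and
// greater_equal reach the code below.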
2692 ASSERT(cc == less || cc == equal || cc == greater_equal);
2693
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002694 // If either side is a constant smi, optimize the comparison.
Leon Clarkee46be812010-01-19 14:06:41 +00002695 bool left_side_constant_smi = false;
2696 bool left_side_constant_null = false;
2697 bool left_side_constant_1_char_string = false;
2698 if (left_side.is_constant()) {
2699 left_side_constant_smi = left_side.handle()->IsSmi();
2700 left_side_constant_null = left_side.handle()->IsNull();
2701 left_side_constant_1_char_string =
2702 (left_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002703 String::cast(*left_side.handle())->length() == 1 &&
2704 String::cast(*left_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002705 }
2706 bool right_side_constant_smi = false;
2707 bool right_side_constant_null = false;
2708 bool right_side_constant_1_char_string = false;
2709 if (right_side.is_constant()) {
2710 right_side_constant_smi = right_side.handle()->IsSmi();
2711 right_side_constant_null = right_side.handle()->IsNull();
2712 right_side_constant_1_char_string =
2713 (right_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002714 String::cast(*right_side.handle())->length() == 1 &&
2715 String::cast(*right_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002716 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002717
2718 if (left_side_constant_smi || right_side_constant_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002719 bool is_loop_condition = (node->AsExpression() != NULL) &&
2720 node->AsExpression()->is_loop_condition();
2721 ConstantSmiComparison(cc, strict, dest, &left_side, &right_side,
2722 left_side_constant_smi, right_side_constant_smi,
2723 is_loop_condition);
Leon Clarkee46be812010-01-19 14:06:41 +00002724 } else if (left_side_constant_1_char_string ||
2725 right_side_constant_1_char_string) {
2726 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
2727 // Trivial case, comparing two constants.
2728 int left_value = String::cast(*left_side.handle())->Get(0);
2729 int right_value = String::cast(*right_side.handle())->Get(0);
2730 switch (cc) {
2731 case less:
2732 dest->Goto(left_value < right_value);
2733 break;
2734 case equal:
2735 dest->Goto(left_value == right_value);
2736 break;
2737 case greater_equal:
2738 dest->Goto(left_value >= right_value);
2739 break;
2740 default:
2741 UNREACHABLE();
2742 }
2743 } else {
2744 // Only one side is a constant 1 character string.
2745 // Only one side is a constant 1-character string.
2746 // If the left side is a constant 1-character string, reverse the operands.
2747 if (left_side_constant_1_char_string) {
2748 Result temp = left_side;
2749 left_side = right_side;
2750 right_side = temp;
2751 cc = ReverseCondition(cc);
2752 // This may reintroduce greater or less_equal as the value of cc.
2753 // CompareStub and the inline code both support all values of cc.
2754 }
2755 // Implement comparison against a constant string, inlining the case
2756 // where both sides are strings.
2757 left_side.ToRegister();
2758
2759 // Here we split control flow to the stub call and inlined cases
2760 // before finally splitting it to the control destination. We use
2761 // a jump target and branching to duplicate the virtual frame at
2762 // the first split. We manually handle the off-frame references
2763 // by reconstituting them on the non-fall-through path.
2764 JumpTarget is_not_string, is_string;
2765 Register left_reg = left_side.reg();
2766 Handle<Object> right_val = right_side.handle();
Steve Block6ded16b2010-05-10 14:33:55 +01002767 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
Leon Clarkee46be812010-01-19 14:06:41 +00002768 __ test(left_side.reg(), Immediate(kSmiTagMask));
2769 is_not_string.Branch(zero, &left_side);
2770 Result temp = allocator_->Allocate();
2771 ASSERT(temp.is_valid());
2772 __ mov(temp.reg(),
2773 FieldOperand(left_side.reg(), HeapObject::kMapOffset));
2774 __ movzx_b(temp.reg(),
2775 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
2776 // If we are testing for equality then make use of the symbol shortcut.
2777 // Check if the right hand side has the same type as the left hand
2778 // side (which is always a symbol).
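// Symbols are canonicalized, so two symbols are equal exactly when they
// are the same object, and a pointer compare suffices.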
2779 if (cc == equal) {
2780 Label not_a_symbol;
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002781 STATIC_ASSERT(kSymbolTag != 0);
Leon Clarkee46be812010-01-19 14:06:41 +00002782 // Ensure that no non-strings have the symbol bit set.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002783 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
Leon Clarkee46be812010-01-19 14:06:41 +00002784 __ test(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
2785 __ j(zero, &not_a_symbol);
2786 // They are symbols, so do identity compare.
2787 __ cmp(left_side.reg(), right_side.handle());
2788 dest->true_target()->Branch(equal);
2789 dest->false_target()->Branch(not_equal);
2790 __ bind(&not_a_symbol);
2791 }
Steve Block6ded16b2010-05-10 14:33:55 +01002792 // Call the compare stub if the left side is not a flat ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002793 __ and_(temp.reg(),
2794 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2795 __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
2796 temp.Unuse();
2797 is_string.Branch(equal, &left_side);
2798
2799 // Set up and call the compare stub.
2800 is_not_string.Bind(&left_side);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002801 CompareFlags flags =
2802 static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_COMPARE_IN_STUB);
2803 CompareStub stub(cc, strict, flags);
Leon Clarkee46be812010-01-19 14:06:41 +00002804 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2805 result.ToRegister();
2806 __ cmp(result.reg(), 0);
2807 result.Unuse();
2808 dest->true_target()->Branch(cc);
2809 dest->false_target()->Jump();
2810
2811 is_string.Bind(&left_side);
Steve Block6ded16b2010-05-10 14:33:55 +01002812 // left_side is a sequential ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002813 left_side = Result(left_reg);
2814 right_side = Result(right_val);
Leon Clarkee46be812010-01-19 14:06:41 +00002815 // Test string equality and comparison.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002816 Label comparison_done;
Leon Clarkee46be812010-01-19 14:06:41 +00002817 if (cc == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00002818 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002819 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002820 __ j(not_equal, &comparison_done);
2821 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002822 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Leon Clarkee46be812010-01-19 14:06:41 +00002823 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2824 char_value);
Leon Clarkee46be812010-01-19 14:06:41 +00002825 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002826 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
2827 Immediate(Smi::FromInt(1)));
2828 // If the length is 0 then the jump is taken and the flags
2829 // correctly represent being less than the one-character string.
2830 __ j(below, &comparison_done);
Steve Block6ded16b2010-05-10 14:33:55 +01002831 // Compare the first character of the string with the
2832 // constant 1-character string.
Leon Clarkee46be812010-01-19 14:06:41 +00002833 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002834 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002835 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2836 char_value);
2837 __ j(not_equal, &comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002838 // If the first character is the same then the long string sorts after
2839 // the short one.
2840 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002841 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002842 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002843 __ bind(&comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002844 left_side.Unuse();
2845 right_side.Unuse();
2846 dest->Split(cc);
2847 }
2848 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002849 // Neither side is a constant Smi, a constant 1-char string, or constant null.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002850 // If either side is a non-smi constant, or known to be a heap number,
2851 // skip the smi check.
Steve Blocka7e24c12009-10-30 11:49:00 +00002852 bool known_non_smi =
2853 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
Steve Block6ded16b2010-05-10 14:33:55 +01002854 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
2855 left_side.type_info().IsDouble() ||
2856 right_side.type_info().IsDouble();
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002857
Leon Clarkee46be812010-01-19 14:06:41 +00002858 NaNInformation nan_info =
2859 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
2860 kBothCouldBeNaN :
2861 kCantBothBeNaN;
Steve Block6ded16b2010-05-10 14:33:55 +01002862
2863 // Inline number comparison, handling any combination of smis and heap
2864 // numbers, if:
2865 // - the code is in a loop,
2866 // - the compare operation is different from equal, and
2867 // - the compare is not a loop condition.
2868 // The reason for excluding equal is that it will most likely be done
2869 // with smis (not heap numbers), and the code for comparing smis is inlined
2870 // separately. The same reasoning applies to loop conditions, which will
2871 // also most likely be smi comparisons.
2872 bool is_loop_condition = (node->AsExpression() != NULL)
2873 && node->AsExpression()->is_loop_condition();
2874 bool inline_number_compare =
2875 loop_nesting() > 0 && cc != equal && !is_loop_condition;
2876
2877 // Left and right needed in registers for the following code.
Steve Blocka7e24c12009-10-30 11:49:00 +00002878 left_side.ToRegister();
2879 right_side.ToRegister();
2880
2881 if (known_non_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002882 // Inlined equality check:
2883 // If at least one of the objects is not NaN, then if the objects
2884 // are identical, they are equal.
Steve Block6ded16b2010-05-10 14:33:55 +01002885 if (nan_info == kCantBothBeNaN && cc == equal) {
2886 __ cmp(left_side.reg(), Operand(right_side.reg()));
2887 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00002888 }
Steve Block6ded16b2010-05-10 14:33:55 +01002889
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002890 // Inlined number comparison:
Steve Block6ded16b2010-05-10 14:33:55 +01002891 if (inline_number_compare) {
2892 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2893 }
2894
2895 // End of in-line compare; call out to the compare stub. Don't include
2896 // number comparison in the stub if it was inlined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002897 CompareFlags flags = ComputeCompareFlags(nan_info, inline_number_compare);
2898 CompareStub stub(cc, strict, flags);
Steve Block6ded16b2010-05-10 14:33:55 +01002899 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2900 __ test(answer.reg(), Operand(answer.reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002901 answer.Unuse();
2902 dest->Split(cc);
2903 } else {
2904 // Here we split control flow to the stub call and inlined cases
2905 // before finally splitting it to the control destination. We use
2906 // a jump target and branching to duplicate the virtual frame at
2907 // the first split. We manually handle the off-frame references
2908 // by reconstituting them on the non-fall-through path.
2909 JumpTarget is_smi;
2910 Register left_reg = left_side.reg();
2911 Register right_reg = right_side.reg();
2912
Steve Block6ded16b2010-05-10 14:33:55 +01002913 // In-line check for comparing two smis.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002914 JumpIfBothSmiUsingTypeInfo(&left_side, &right_side, &is_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01002915
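// If the recorded type info proves both sides are smis, the jump above
// is unconditional and no fall-through frame remains, which is why the
// non-smi code below is guarded by has_valid_frame().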
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002916 if (has_valid_frame()) {
2917 // Inline the equality check if both operands can't be a NaN. If both
2918 // objects are the same they are equal.
2919 if (nan_info == kCantBothBeNaN && cc == equal) {
2920 __ cmp(left_side.reg(), Operand(right_side.reg()));
2921 dest->true_target()->Branch(equal);
2922 }
2923
2924 // Inlined number comparison:
2925 if (inline_number_compare) {
2926 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2927 }
2928
2929 // End of in-line compare; call out to the compare stub. Don't include
2930 // number comparison in the stub if it was inlined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002931 CompareFlags flags =
2932 ComputeCompareFlags(nan_info, inline_number_compare);
2933 CompareStub stub(cc, strict, flags);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002934 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2935 __ test(answer.reg(), Operand(answer.reg()));
2936 answer.Unuse();
2937 if (is_smi.is_linked()) {
2938 dest->true_target()->Branch(cc);
2939 dest->false_target()->Jump();
2940 } else {
2941 dest->Split(cc);
2942 }
2943 }
2944
2945 if (is_smi.is_linked()) {
2946 is_smi.Bind();
2947 left_side = Result(left_reg);
2948 right_side = Result(right_reg);
Steve Block6ded16b2010-05-10 14:33:55 +01002949 __ cmp(left_side.reg(), Operand(right_side.reg()));
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002950 right_side.Unuse();
2951 left_side.Unuse();
2952 dest->Split(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01002953 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002954 }
2955 }
2956}
2957
2958
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002959void CodeGenerator::ConstantSmiComparison(Condition cc,
2960 bool strict,
2961 ControlDestination* dest,
2962 Result* left_side,
2963 Result* right_side,
2964 bool left_side_constant_smi,
2965 bool right_side_constant_smi,
2966 bool is_loop_condition) {
2967 if (left_side_constant_smi && right_side_constant_smi) {
2968 // Trivial case, comparing two constants.
2969 int left_value = Smi::cast(*left_side->handle())->value();
2970 int right_value = Smi::cast(*right_side->handle())->value();
2971 switch (cc) {
2972 case less:
2973 dest->Goto(left_value < right_value);
2974 break;
2975 case equal:
2976 dest->Goto(left_value == right_value);
2977 break;
2978 case greater_equal:
2979 dest->Goto(left_value >= right_value);
2980 break;
2981 default:
2982 UNREACHABLE();
2983 }
2984 } else {
2985 // Only one side is a constant Smi.
2986 // If the left side is a constant Smi, reverse the operands.
2987 // Since one side is a constant Smi, conversion order does not matter.
2988 if (left_side_constant_smi) {
2989 Result* temp = left_side;
2990 left_side = right_side;
2991 right_side = temp;
2992 cc = ReverseCondition(cc);
2993 // This may re-introduce greater or less_equal as the value of cc.
2994 // CompareStub and the inline code both support all values of cc.
2995 }
2996 // Implement comparison against a constant Smi, inlining the case
2997 // where both sides are Smis.
2998 left_side->ToRegister();
2999 Register left_reg = left_side->reg();
3000 Handle<Object> right_val = right_side->handle();
3001
3002 if (left_side->is_smi()) {
3003 if (FLAG_debug_code) {
3004 __ AbortIfNotSmi(left_reg);
3005 }
3006 // Test smi equality and comparison by signed int comparison.
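// Tagging multiplies by two and therefore preserves signed order, so
// comparing the tagged words gives the same result as comparing the
// untagged integers.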
3007 if (IsUnsafeSmi(right_side->handle())) {
3008 right_side->ToRegister();
3009 __ cmp(left_reg, Operand(right_side->reg()));
3010 } else {
3011 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3012 }
3013 left_side->Unuse();
3014 right_side->Unuse();
3015 dest->Split(cc);
3016 } else {
3017 // Only the case remains where the left side could be a non-smi.
3018 JumpTarget is_smi;
3019 if (cc == equal) {
3020 // We can do the equality comparison before the smi check.
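// This is safe because only the identical tagged value can compare equal
// to the smi immediate; any other bit pattern, smi or not, falls through
// to the smi check.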
3021 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3022 dest->true_target()->Branch(equal);
3023 __ test(left_reg, Immediate(kSmiTagMask));
3024 dest->false_target()->Branch(zero);
3025 } else {
3026 // Do the smi check, then the comparison.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003027 __ test(left_reg, Immediate(kSmiTagMask));
3028 is_smi.Branch(zero, left_side, right_side);
3029 }
3030
3031 // Jump or fall through to here if we are comparing a non-smi to a
3032 // constant smi. If the non-smi is a heap number and this is not
3033 // a loop condition, inline the floating point code.
Steve Block44f0eee2011-05-26 01:26:41 +01003034 if (!is_loop_condition &&
3035 masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003036 // Right side is a constant smi and left side has been checked
3037 // not to be a smi.
3038 CpuFeatures::Scope use_sse2(SSE2);
3039 JumpTarget not_number;
3040 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003041 Immediate(FACTORY->heap_number_map()));
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003042 not_number.Branch(not_equal, left_side);
3043 __ movdbl(xmm1,
3044 FieldOperand(left_reg, HeapNumber::kValueOffset));
3045 int value = Smi::cast(*right_val)->value();
3046 if (value == 0) {
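// xorpd is the usual idiom for producing +0.0 without a memory operand.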
3047 __ xorpd(xmm0, xmm0);
3048 } else {
3049 Result temp = allocator()->Allocate();
3050 __ mov(temp.reg(), Immediate(value));
3051 __ cvtsi2sd(xmm0, Operand(temp.reg()));
3052 temp.Unuse();
3053 }
3054 __ ucomisd(xmm1, xmm0);
3055 // Jump to builtin for NaN.
3056 not_number.Branch(parity_even, left_side);
3057 left_side->Unuse();
3058 dest->true_target()->Branch(DoubleCondition(cc));
3059 dest->false_target()->Jump();
3060 not_number.Bind(left_side);
3061 }
3062
3063 // Set up and call the compare stub.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003064 CompareFlags flags =
3065 static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_CODE_IN_STUB);
3066 CompareStub stub(cc, strict, flags);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003067 Result result = frame_->CallStub(&stub, left_side, right_side);
3068 result.ToRegister();
3069 __ test(result.reg(), Operand(result.reg()));
3070 result.Unuse();
3071 if (cc == equal) {
3072 dest->Split(cc);
3073 } else {
3074 dest->true_target()->Branch(cc);
3075 dest->false_target()->Jump();
3076
3077 // It is important for performance for this case to be at the end.
3078 is_smi.Bind(left_side, right_side);
3079 if (IsUnsafeSmi(right_side->handle())) {
3080 right_side->ToRegister();
3081 __ cmp(left_reg, Operand(right_side->reg()));
3082 } else {
3083 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3084 }
3085 left_side->Unuse();
3086 right_side->Unuse();
3087 dest->Split(cc);
3088 }
3089 }
3090 }
3091}
3092
3093
Steve Block6ded16b2010-05-10 14:33:55 +01003094 // Check that the comparison operand is a number. Jump to the not_numbers
3095 // target, passing the left and right results, if the operand is not a number.
3096static void CheckComparisonOperand(MacroAssembler* masm_,
3097 Result* operand,
3098 Result* left_side,
3099 Result* right_side,
3100 JumpTarget* not_numbers) {
3101 // Perform check if operand is not known to be a number.
3102 if (!operand->type_info().IsNumber()) {
3103 Label done;
3104 __ test(operand->reg(), Immediate(kSmiTagMask));
3105 __ j(zero, &done);
3106 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003107 Immediate(FACTORY->heap_number_map()));
Steve Block6ded16b2010-05-10 14:33:55 +01003108 not_numbers->Branch(not_equal, left_side, right_side, not_taken);
3109 __ bind(&done);
3110 }
3111}
3112
3113
3114 // Load a comparison operand onto the FPU stack. This assumes that the
3115 // operand has already been checked and is a number.
3116static void LoadComparisonOperand(MacroAssembler* masm_,
3117 Result* operand) {
3118 Label done;
3119 if (operand->type_info().IsDouble()) {
3120 // Operand is known to be a heap number, just load it.
3121 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3122 } else if (operand->type_info().IsSmi()) {
3123 // Operand is known to be a smi. Convert it to double and keep the original
3124 // smi.
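// (fild_s only accepts a memory operand, so the untagged value makes a
// round trip through the stack.)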
3125 __ SmiUntag(operand->reg());
3126 __ push(operand->reg());
3127 __ fild_s(Operand(esp, 0));
3128 __ pop(operand->reg());
3129 __ SmiTag(operand->reg());
3130 } else {
3131 // Operand type not known: check for smi, otherwise assume heap number.
3132 Label smi;
3133 __ test(operand->reg(), Immediate(kSmiTagMask));
3134 __ j(zero, &smi);
3135 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3136 __ jmp(&done);
3137 __ bind(&smi);
3138 __ SmiUntag(operand->reg());
3139 __ push(operand->reg());
3140 __ fild_s(Operand(esp, 0));
3141 __ pop(operand->reg());
3142 __ SmiTag(operand->reg());
3143 __ jmp(&done);
3144 }
3145 __ bind(&done);
3146}
3147
3148
3149 // Load a comparison operand into an XMM register. Jump to the not_numbers
3150 // target, passing the left and right results, if the operand is not a number.
3151static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
3152 Result* operand,
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003153 XMMRegister xmm_reg,
Steve Block6ded16b2010-05-10 14:33:55 +01003154 Result* left_side,
3155 Result* right_side,
3156 JumpTarget* not_numbers) {
3157 Label done;
3158 if (operand->type_info().IsDouble()) {
3159 // Operand is known to be a heap number, just load it.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003160 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003161 } else if (operand->type_info().IsSmi()) {
3162 // Operand is known to be a smi. Convert it to double and keep the original
3163 // smi.
3164 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003165 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003166 __ SmiTag(operand->reg());
3167 } else {
3168 // Operand type not known, check for smi or heap number.
3169 Label smi;
3170 __ test(operand->reg(), Immediate(kSmiTagMask));
3171 __ j(zero, &smi);
3172 if (!operand->type_info().IsNumber()) {
3173 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01003174 Immediate(FACTORY->heap_number_map()));
Steve Block6ded16b2010-05-10 14:33:55 +01003175 not_numbers->Branch(not_equal, left_side, right_side, taken);
3176 }
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003177 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003178 __ jmp(&done);
3179
3180 __ bind(&smi);
3181 // Convert the smi to a double and keep the original smi.
3182 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003183 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003184 __ SmiTag(operand->reg());
3185 __ jmp(&done);
3186 }
3187 __ bind(&done);
3188}
3189
3190
3191void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
3192 Result* right_side,
3193 Condition cc,
3194 ControlDestination* dest) {
3195 ASSERT(left_side->is_register());
3196 ASSERT(right_side->is_register());
3197
3198 JumpTarget not_numbers;
Steve Block44f0eee2011-05-26 01:26:41 +01003199 if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
Steve Block6ded16b2010-05-10 14:33:55 +01003200 CpuFeatures::Scope use_sse2(SSE2);
3201
3202 // Load left and right operand into registers xmm0 and xmm1 and compare.
3203 LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
3204 &not_numbers);
3205 LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
3206 &not_numbers);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003207 __ ucomisd(xmm0, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +01003208 } else {
3209 Label check_right, compare;
3210
3211 // Make sure that both comparison operands are numbers.
3212 CheckComparisonOperand(masm_, left_side, left_side, right_side,
3213 &not_numbers);
3214 CheckComparisonOperand(masm_, right_side, left_side, right_side,
3215 &not_numbers);
3216
3217 // Load the right and left operands onto the FPU stack and compare.
3218 LoadComparisonOperand(masm_, right_side);
3219 LoadComparisonOperand(masm_, left_side);
3220 __ FCmp();
3221 }
3222
3223 // Bail out if a NaN is involved.
3224 not_numbers.Branch(parity_even, left_side, right_side, not_taken);
3225
3226 // Split to destination targets based on comparison.
3227 left_side->Unuse();
3228 right_side->Unuse();
3229 dest->true_target()->Branch(DoubleCondition(cc));
3230 dest->false_target()->Jump();
3231
3232 not_numbers.Bind(left_side, right_side);
3233}
3234
3235
Steve Blocka7e24c12009-10-30 11:49:00 +00003236// Call the function just below TOS on the stack with the given
3237// arguments. The receiver is the TOS.
3238void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00003239 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00003240 int position) {
3241 // Push the arguments ("left-to-right") on the stack.
3242 int arg_count = args->length();
3243 for (int i = 0; i < arg_count; i++) {
3244 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01003245 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00003246 }
3247
3248 // Record the position for debugging purposes.
3249 CodeForSourcePosition(position);
3250
3251 // Use the shared code stub to call the function.
3252 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00003253 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003254 Result answer = frame_->CallStub(&call_function, arg_count + 1);
3255 // Restore context and replace function on the stack with the
3256 // result of the stub invocation.
3257 frame_->RestoreContextRegister();
3258 frame_->SetElementAt(0, &answer);
3259}
3260
3261
Leon Clarked91b9f72010-01-27 17:25:45 +00003262void CodeGenerator::CallApplyLazy(Expression* applicand,
Steve Blocka7e24c12009-10-30 11:49:00 +00003263 Expression* receiver,
3264 VariableProxy* arguments,
3265 int position) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003266 // An optimized implementation of expressions of the form
3267 // x.apply(y, arguments).
3268 // If the arguments object of the scope has not been allocated,
3269 // and x.apply is Function.prototype.apply, this optimization
3270 // just copies y and the arguments of the current function on the
3271 // stack, as receiver and arguments, and calls x.
3272 // In the implementation comments, we call x the applicand
3273 // and y the receiver.
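// For example, in
//   function f() { return x.apply(y, arguments); }
// the call is compiled without materializing the arguments object.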
Steve Blocka7e24c12009-10-30 11:49:00 +00003274 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
3275 ASSERT(arguments->IsArguments());
3276
Leon Clarked91b9f72010-01-27 17:25:45 +00003277 // Load applicand.apply onto the stack. This will usually
Steve Blocka7e24c12009-10-30 11:49:00 +00003278 // give us a megamorphic load site. Not super, but it works.
Leon Clarked91b9f72010-01-27 17:25:45 +00003279 Load(applicand);
Andrei Popescu402d9372010-02-26 13:31:12 +00003280 frame()->Dup();
Steve Block44f0eee2011-05-26 01:26:41 +01003281 Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
Leon Clarked91b9f72010-01-27 17:25:45 +00003282 frame()->Push(name);
3283 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
3284 __ nop();
3285 frame()->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00003286
3287 // Load the receiver and the existing arguments object onto the
3288 // expression stack. Avoid allocating the arguments object here.
3289 Load(receiver);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003290 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00003291
3292 // Emit the source position information after having loaded the
3293 // receiver and the arguments.
3294 CodeForSourcePosition(position);
Leon Clarked91b9f72010-01-27 17:25:45 +00003295 // Contents of frame at this point:
3296 // Frame[0]: arguments object of the current function or the hole.
3297 // Frame[1]: receiver
3298 // Frame[2]: applicand.apply
3299 // Frame[3]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003300
3301 // Check if the arguments object has been lazily allocated
3302 // already. If so, just use that instead of copying the arguments
3303 // from the stack. This also deals with cases where a local variable
3304 // named 'arguments' has been introduced.
3305 frame_->Dup();
3306 Result probe = frame_->Pop();
Leon Clarked91b9f72010-01-27 17:25:45 +00003307 { VirtualFrame::SpilledScope spilled_scope;
3308 Label slow, done;
3309 bool try_lazy = true;
3310 if (probe.is_constant()) {
Ben Murdoch086aeea2011-05-13 15:57:08 +01003311 try_lazy = probe.handle()->IsArgumentsMarker();
Leon Clarked91b9f72010-01-27 17:25:45 +00003312 } else {
Steve Block44f0eee2011-05-26 01:26:41 +01003313 __ cmp(Operand(probe.reg()), Immediate(FACTORY->arguments_marker()));
Leon Clarked91b9f72010-01-27 17:25:45 +00003314 probe.Unuse();
3315 __ j(not_equal, &slow);
3316 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003317
Leon Clarked91b9f72010-01-27 17:25:45 +00003318 if (try_lazy) {
3319 Label build_args;
3320 // Get rid of the arguments object probe.
3321 frame_->Drop(); // Can be called on a spilled frame.
3322 // Stack now has 3 elements on it.
3323 // Contents of stack at this point:
3324 // esp[0]: receiver
3325 // esp[1]: applicand.apply
3326 // esp[2]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003327
Leon Clarked91b9f72010-01-27 17:25:45 +00003328 // Check that the receiver really is a JavaScript object.
3329 __ mov(eax, Operand(esp, 0));
3330 __ test(eax, Immediate(kSmiTagMask));
3331 __ j(zero, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003332 // We allow all JSObjects including JSFunctions. As long as
3333 // JS_FUNCTION_TYPE is the last instance type and it is right
3334 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
3335 // bound.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003336 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3337 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Leon Clarked91b9f72010-01-27 17:25:45 +00003338 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3339 __ j(below, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003340
Leon Clarked91b9f72010-01-27 17:25:45 +00003341 // Check that applicand.apply is Function.prototype.apply.
3342 __ mov(eax, Operand(esp, kPointerSize));
3343 __ test(eax, Immediate(kSmiTagMask));
3344 __ j(zero, &build_args);
3345 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
3346 __ j(not_equal, &build_args);
Steve Block791712a2010-08-27 10:21:07 +01003347 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
3348 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
Steve Block44f0eee2011-05-26 01:26:41 +01003349 Handle<Code> apply_code(masm()->isolate()->builtins()->builtin(
3350 Builtins::kFunctionApply));
Steve Block791712a2010-08-27 10:21:07 +01003351 __ cmp(Operand(ecx), Immediate(apply_code));
Leon Clarked91b9f72010-01-27 17:25:45 +00003352 __ j(not_equal, &build_args);
3353
3354 // Check that applicand is a function.
3355 __ mov(edi, Operand(esp, 2 * kPointerSize));
3356 __ test(edi, Immediate(kSmiTagMask));
3357 __ j(zero, &build_args);
3358 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3359 __ j(not_equal, &build_args);
3360
3361 // Copy the arguments to this function possibly from the
3362 // adaptor frame below it.
3363 Label invoke, adapted;
3364 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3365 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
3366 __ cmp(Operand(ecx),
3367 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3368 __ j(equal, &adapted);
3369
3370 // No arguments adaptor frame. Copy fixed number of arguments.
Andrei Popescu31002712010-02-23 13:46:05 +00003371 __ mov(eax, Immediate(scope()->num_parameters()));
3372 for (int i = 0; i < scope()->num_parameters(); i++) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003373 __ push(frame_->ParameterAt(i));
3374 }
3375 __ jmp(&invoke);
3376
3377 // Arguments adaptor frame present. Copy arguments from there, but
3378 // limit the number copied to avoid overflowing the stack.
3379 __ bind(&adapted);
3380 static const uint32_t kArgumentsLimit = 1 * KB;
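// At most 1024 arguments are copied inline; larger argument counts take
// the slow path below instead of risking a stack overflow.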
3381 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3382 __ SmiUntag(eax);
3383 __ mov(ecx, Operand(eax));
3384 __ cmp(eax, kArgumentsLimit);
3385 __ j(above, &build_args);
3386
3387 // Loop through the arguments pushing them onto the execution
3388 // stack. We don't inform the virtual frame of the push, so we don't
3389 // have to worry about getting rid of the elements from the virtual
3390 // frame.
3391 Label loop;
3392 // ecx is a small non-negative integer, due to the test above.
3393 __ test(ecx, Operand(ecx));
3394 __ j(zero, &invoke);
3395 __ bind(&loop);
3396 __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
3397 __ dec(ecx);
3398 __ j(not_zero, &loop);
3399
3400 // Invoke the function.
3401 __ bind(&invoke);
3402 ParameterCount actual(eax);
3403 __ InvokeFunction(edi, actual, CALL_FUNCTION);
3404 // Drop applicand.apply and applicand from the stack, and push
3405 // the result of the function call, but leave the spilled frame
3406 // unchanged, with 3 elements, so it is correct when we compile the
3407 // slow-case code.
3408 __ add(Operand(esp), Immediate(2 * kPointerSize));
3409 __ push(eax);
3410 // Stack now has 1 element:
3411 // esp[0]: result
3412 __ jmp(&done);
3413
3414 // Slow-case: Allocate the arguments object since we know it isn't
3415 // there, and fall through to the slow case where we call
3416 // applicand.apply.
3417 __ bind(&build_args);
3418 // Stack now has 3 elements, because we jumped here from a point where:
3419 // esp[0]: receiver
3420 // esp[1]: applicand.apply
3421 // esp[2]: applicand.
3422
3423 // StoreArgumentsObject requires a correct frame, and may modify it.
3424 Result arguments_object = StoreArgumentsObject(false);
3425 frame_->SpillAll();
3426 arguments_object.ToRegister();
3427 frame_->EmitPush(arguments_object.reg());
3428 arguments_object.Unuse();
3429 // Stack and frame now have 4 elements.
3430 __ bind(&slow);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003431 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003432
Leon Clarked91b9f72010-01-27 17:25:45 +00003433 // Generic computation of x.apply(y, args) with no special optimization.
3434 // Flip applicand.apply and applicand on the stack, so
3435 // applicand looks like the receiver of the applicand.apply call.
3436 // Then process it as a normal function call.
3437 __ mov(eax, Operand(esp, 3 * kPointerSize));
3438 __ mov(ebx, Operand(esp, 2 * kPointerSize));
3439 __ mov(Operand(esp, 2 * kPointerSize), eax);
3440 __ mov(Operand(esp, 3 * kPointerSize), ebx);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003441
Leon Clarked91b9f72010-01-27 17:25:45 +00003442 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
3443 Result res = frame_->CallStub(&call_function, 3);
3444 // The function and its two arguments have been dropped.
3445 frame_->Drop(1); // Drop the receiver as well.
3446 res.ToRegister();
3447 frame_->EmitPush(res.reg());
3448 // Stack now has 1 element:
3449 // esp[0]: result
3450 if (try_lazy) __ bind(&done);
3451 } // End of spilled scope.
3452 // Restore the context register after a call.
Steve Blocka7e24c12009-10-30 11:49:00 +00003453 frame_->RestoreContextRegister();
3454}
3455
3456
3457class DeferredStackCheck: public DeferredCode {
3458 public:
3459 DeferredStackCheck() {
3460 set_comment("[ DeferredStackCheck");
3461 }
3462
3463 virtual void Generate();
3464};
3465
3466
3467void DeferredStackCheck::Generate() {
3468 StackCheckStub stub;
3469 __ CallStub(&stub);
3470}
3471
3472
3473void CodeGenerator::CheckStack() {
Steve Blockd0582a62009-12-15 09:54:21 +00003474 DeferredStackCheck* deferred = new DeferredStackCheck;
3475 ExternalReference stack_limit =
Steve Block44f0eee2011-05-26 01:26:41 +01003476 ExternalReference::address_of_stack_limit(masm()->isolate());
Steve Blockd0582a62009-12-15 09:54:21 +00003477 __ cmp(esp, Operand::StaticVariable(stack_limit));
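// The stack grows downwards, so esp below the limit means the stack is
// close to overflowing and the deferred stub call is taken.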
3478 deferred->Branch(below);
3479 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00003480}
3481
3482
3483void CodeGenerator::VisitAndSpill(Statement* statement) {
3484 ASSERT(in_spilled_code());
3485 set_in_spilled_code(false);
3486 Visit(statement);
3487 if (frame_ != NULL) {
3488 frame_->SpillAll();
3489 }
3490 set_in_spilled_code(true);
3491}
3492
3493
3494void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003495#ifdef DEBUG
3496 int original_height = frame_->height();
3497#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003498 ASSERT(in_spilled_code());
3499 set_in_spilled_code(false);
3500 VisitStatements(statements);
3501 if (frame_ != NULL) {
3502 frame_->SpillAll();
3503 }
3504 set_in_spilled_code(true);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003505
3506 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003507}
3508
3509
3510void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003511#ifdef DEBUG
3512 int original_height = frame_->height();
3513#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003514 ASSERT(!in_spilled_code());
3515 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
3516 Visit(statements->at(i));
3517 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003518 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003519}
3520
3521
3522void CodeGenerator::VisitBlock(Block* node) {
3523 ASSERT(!in_spilled_code());
3524 Comment cmnt(masm_, "[ Block");
3525 CodeForStatementPosition(node);
3526 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3527 VisitStatements(node->statements());
3528 if (node->break_target()->is_linked()) {
3529 node->break_target()->Bind();
3530 }
3531 node->break_target()->Unuse();
3532}
3533
3534
3535void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
3536 // Call the runtime to declare the globals. The inevitable call
3537 // will sync frame elements to memory anyway, so we do it eagerly to
3538 // allow us to push the arguments directly into place.
3539 frame_->SyncRange(0, frame_->element_count() - 1);
3540
Steve Block3ce2e202009-11-05 08:53:23 +00003541 frame_->EmitPush(esi); // The context is the first argument.
Steve Blocka7e24c12009-10-30 11:49:00 +00003542 frame_->EmitPush(Immediate(pairs));
Steve Blocka7e24c12009-10-30 11:49:00 +00003543 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003544 frame_->EmitPush(Immediate(Smi::FromInt(strict_mode_flag())));
3545 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003546 // Return value is ignored.
3547}
3548
3549
3550void CodeGenerator::VisitDeclaration(Declaration* node) {
3551 Comment cmnt(masm_, "[ Declaration");
3552 Variable* var = node->proxy()->var();
3553 ASSERT(var != NULL); // must have been resolved
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003554 Slot* slot = var->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00003555
3556 // If it was not possible to allocate the variable at compile time,
3557 // we need to "declare" it at runtime to make sure it actually
3558 // exists in the local context.
3559 if (slot != NULL && slot->type() == Slot::LOOKUP) {
3560 // Variables with a "LOOKUP" slot were introduced as non-locals
3561 // during variable resolution and must have mode DYNAMIC.
3562 ASSERT(var->is_dynamic());
3563 // For now, just do a runtime call. Sync the virtual frame eagerly
3564 // so we can simply push the arguments into place.
3565 frame_->SyncRange(0, frame_->element_count() - 1);
3566 frame_->EmitPush(esi);
3567 frame_->EmitPush(Immediate(var->name()));
3568 // Declaration nodes are always introduced in one of two modes.
3569 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
3570 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
3571 frame_->EmitPush(Immediate(Smi::FromInt(attr)));
3572 // Push initial value, if any.
3573 // Note: For variables we must not push an initial value (such as
3574 // 'undefined') because we may have a (legal) redeclaration and we
3575 // must not destroy the current value.
3576 if (node->mode() == Variable::CONST) {
Steve Block44f0eee2011-05-26 01:26:41 +01003577 frame_->EmitPush(Immediate(FACTORY->the_hole_value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003578 } else if (node->fun() != NULL) {
3579 Load(node->fun());
3580 } else {
3581 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
3582 }
3583 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
3584 // Ignore the return value (declarations are statements).
3585 return;
3586 }
3587
3588 ASSERT(!var->is_global());
3589
3590 // If we have a function or a constant, we need to initialize the variable.
3591 Expression* val = NULL;
3592 if (node->mode() == Variable::CONST) {
Steve Block44f0eee2011-05-26 01:26:41 +01003593 val = new Literal(FACTORY->the_hole_value());
Steve Blocka7e24c12009-10-30 11:49:00 +00003594 } else {
3595 val = node->fun(); // NULL if we don't have a function
3596 }
3597
3598 if (val != NULL) {
3599 {
3600 // Set the initial value.
3601 Reference target(this, node->proxy());
3602 Load(val);
3603 target.SetValue(NOT_CONST_INIT);
3604 // The reference is removed from the stack (preserving TOS) when
3605 // it goes out of scope.
3606 }
3607 // Get rid of the assigned value (declarations are statements).
3608 frame_->Drop();
3609 }
3610}
3611
3612
3613void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
3614 ASSERT(!in_spilled_code());
3615 Comment cmnt(masm_, "[ ExpressionStatement");
3616 CodeForStatementPosition(node);
3617 Expression* expression = node->expression();
3618 expression->MarkAsStatement();
3619 Load(expression);
3620 // Remove the lingering expression result from the top of stack.
3621 frame_->Drop();
3622}
3623
3624
3625void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
3626 ASSERT(!in_spilled_code());
3627 Comment cmnt(masm_, "// EmptyStatement");
3628 CodeForStatementPosition(node);
3629 // nothing to do
3630}
3631
3632
3633void CodeGenerator::VisitIfStatement(IfStatement* node) {
3634 ASSERT(!in_spilled_code());
3635 Comment cmnt(masm_, "[ IfStatement");
3636 // Generate different code depending on which parts of the if statement
3637 // are present or not.
3638 bool has_then_stm = node->HasThenStatement();
3639 bool has_else_stm = node->HasElseStatement();
3640
3641 CodeForStatementPosition(node);
3642 JumpTarget exit;
3643 if (has_then_stm && has_else_stm) {
3644 JumpTarget then;
3645 JumpTarget else_;
3646 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003647 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003648
3649 if (dest.false_was_fall_through()) {
3650 // The else target was bound, so we compile the else part first.
3651 Visit(node->else_statement());
3652
3653 // We may have dangling jumps to the then part.
3654 if (then.is_linked()) {
3655 if (has_valid_frame()) exit.Jump();
3656 then.Bind();
3657 Visit(node->then_statement());
3658 }
3659 } else {
3660 // The then target was bound, so we compile the then part first.
3661 Visit(node->then_statement());
3662
3663 if (else_.is_linked()) {
3664 if (has_valid_frame()) exit.Jump();
3665 else_.Bind();
3666 Visit(node->else_statement());
3667 }
3668 }
3669
3670 } else if (has_then_stm) {
3671 ASSERT(!has_else_stm);
3672 JumpTarget then;
3673 ControlDestination dest(&then, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003674 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003675
3676 if (dest.false_was_fall_through()) {
3677 // The exit label was bound. We may have dangling jumps to the
3678 // then part.
3679 if (then.is_linked()) {
3680 exit.Unuse();
3681 exit.Jump();
3682 then.Bind();
3683 Visit(node->then_statement());
3684 }
3685 } else {
3686 // The then label was bound.
3687 Visit(node->then_statement());
3688 }
3689
3690 } else if (has_else_stm) {
3691 ASSERT(!has_then_stm);
3692 JumpTarget else_;
3693 ControlDestination dest(&exit, &else_, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003694 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003695
3696 if (dest.true_was_fall_through()) {
3697 // The exit label was bound. We may have dangling jumps to the
3698 // else part.
3699 if (else_.is_linked()) {
3700 exit.Unuse();
3701 exit.Jump();
3702 else_.Bind();
3703 Visit(node->else_statement());
3704 }
3705 } else {
3706 // The else label was bound.
3707 Visit(node->else_statement());
3708 }
3709
3710 } else {
3711 ASSERT(!has_then_stm && !has_else_stm);
3712 // We only care about the condition's side effects (not its value
3713 // or control flow effect). LoadCondition is called without
3714 // forcing control flow.
3715 ControlDestination dest(&exit, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003716 LoadCondition(node->condition(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003717 if (!dest.is_used()) {
3718 // We got a value on the frame rather than (or in addition to)
3719 // control flow.
3720 frame_->Drop();
3721 }
3722 }
3723
3724 if (exit.is_linked()) {
3725 exit.Bind();
3726 }
3727}
3728
3729
3730void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
3731 ASSERT(!in_spilled_code());
3732 Comment cmnt(masm_, "[ ContinueStatement");
3733 CodeForStatementPosition(node);
3734 node->target()->continue_target()->Jump();
3735}
3736
3737
3738void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
3739 ASSERT(!in_spilled_code());
3740 Comment cmnt(masm_, "[ BreakStatement");
3741 CodeForStatementPosition(node);
3742 node->target()->break_target()->Jump();
3743}
3744
3745
3746void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
3747 ASSERT(!in_spilled_code());
3748 Comment cmnt(masm_, "[ ReturnStatement");
3749
3750 CodeForStatementPosition(node);
3751 Load(node->expression());
3752 Result return_value = frame_->Pop();
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003753 masm()->positions_recorder()->WriteRecordedPositions();
Steve Blocka7e24c12009-10-30 11:49:00 +00003754 if (function_return_is_shadowed_) {
3755 function_return_.Jump(&return_value);
3756 } else {
3757 frame_->PrepareForReturn();
3758 if (function_return_.is_bound()) {
3759 // If the function return label is already bound we reuse the
3760 // code by jumping to the return site.
3761 function_return_.Jump(&return_value);
3762 } else {
3763 function_return_.Bind(&return_value);
3764 GenerateReturnSequence(&return_value);
3765 }
3766 }
3767}
3768
3769
3770void CodeGenerator::GenerateReturnSequence(Result* return_value) {
3771 // The return value is a live (but not currently reference counted)
3772 // reference to eax. This is safe because the current frame does not
3773 // contain a reference to eax (it is prepared for the return by spilling
3774 // all registers).
3775 if (FLAG_trace) {
3776 frame_->Push(return_value);
3777 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
3778 }
3779 return_value->ToRegister(eax);
3780
3781 // Add a label for checking the size of the code used for returning.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003782#ifdef DEBUG
Steve Blocka7e24c12009-10-30 11:49:00 +00003783 Label check_exit_codesize;
3784 masm_->bind(&check_exit_codesize);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003785#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003786
3787 // Leave the frame and return popping the arguments and the
3788 // receiver.
3789 frame_->Exit();
Steve Block1e0659c2011-05-24 12:43:12 +01003790 int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
3791 __ Ret(arguments_bytes, ecx);
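// (Ret with a byte count pops the arguments and the receiver as part of
// the return; ecx serves only as a scratch register here.)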
Steve Blocka7e24c12009-10-30 11:49:00 +00003792 DeleteFrame();
3793
3794#ifdef ENABLE_DEBUGGER_SUPPORT
Steve Block1e0659c2011-05-24 12:43:12 +01003795 // Check that the size of the code used for returning is large enough
3796 // for the debugger's requirements.
3797 ASSERT(Assembler::kJSReturnSequenceLength <=
3798 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
Steve Blocka7e24c12009-10-30 11:49:00 +00003799#endif
3800}
3801
3802
3803void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
3804 ASSERT(!in_spilled_code());
3805 Comment cmnt(masm_, "[ WithEnterStatement");
3806 CodeForStatementPosition(node);
3807 Load(node->expression());
3808 Result context;
3809 if (node->is_catch_block()) {
3810 context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
3811 } else {
3812 context = frame_->CallRuntime(Runtime::kPushContext, 1);
3813 }
3814
3815 // Update context local.
3816 frame_->SaveContextRegister();
3817
3818 // Verify that the runtime call result and esi agree.
3819 if (FLAG_debug_code) {
3820 __ cmp(context.reg(), Operand(esi));
3821 __ Assert(equal, "Runtime::NewContext should end up in esi");
3822 }
3823}
3824
3825
3826void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
3827 ASSERT(!in_spilled_code());
3828 Comment cmnt(masm_, "[ WithExitStatement");
3829 CodeForStatementPosition(node);
3830 // Pop context.
3831 __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
3832 // Update context local.
3833 frame_->SaveContextRegister();
3834}
3835
3836
3837void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
3838 ASSERT(!in_spilled_code());
3839 Comment cmnt(masm_, "[ SwitchStatement");
3840 CodeForStatementPosition(node);
3841 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3842
3843 // Compile the switch value.
3844 Load(node->tag());
3845
3846 ZoneList<CaseClause*>* cases = node->cases();
3847 int length = cases->length();
3848 CaseClause* default_clause = NULL;
3849
3850 JumpTarget next_test;
3851 // Compile the case label expressions and comparisons. Exit early
3852 // if a comparison is unconditionally true. The target next_test is
3853 // bound before the loop in order to indicate control flow to the
3854 // first comparison.
3855 next_test.Bind();
3856 for (int i = 0; i < length && !next_test.is_unused(); i++) {
3857 CaseClause* clause = cases->at(i);
3858 // The default is not a test, but remember it for later.
3859 if (clause->is_default()) {
3860 default_clause = clause;
3861 continue;
3862 }
3863
3864 Comment cmnt(masm_, "[ Case comparison");
3865 // We recycle the same target next_test for each test. Bind it if
3866 // the previous test has not done so and then unuse it for the
3867 // loop.
3868 if (next_test.is_linked()) {
3869 next_test.Bind();
3870 }
3871 next_test.Unuse();
3872
3873 // Duplicate the switch value.
3874 frame_->Dup();
3875
3876 // Compile the label expression.
3877 Load(clause->label());
3878
3879 // Compare and branch to the body if true or the next test if
3880 // false. Prefer the next test as a fall through.
3881 ControlDestination dest(clause->body_target(), &next_test, false);
Leon Clarkee46be812010-01-19 14:06:41 +00003882 Comparison(node, equal, true, &dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00003883
3884 // If the comparison fell through to the true target, jump to the
3885 // actual body.
3886 if (dest.true_was_fall_through()) {
3887 clause->body_target()->Unuse();
3888 clause->body_target()->Jump();
3889 }
3890 }
3891
3892 // If there was control flow to a next test from the last one
3893 // compiled, compile a jump to the default or break target.
3894 if (!next_test.is_unused()) {
3895 if (next_test.is_linked()) {
3896 next_test.Bind();
3897 }
3898 // Drop the switch value.
3899 frame_->Drop();
3900 if (default_clause != NULL) {
3901 default_clause->body_target()->Jump();
3902 } else {
3903 node->break_target()->Jump();
3904 }
3905 }
3906
Steve Blocka7e24c12009-10-30 11:49:00 +00003907 // The last instruction emitted was a jump, either to the default
3908 // clause or the break target, or else to a case body from the loop
3909 // that compiles the tests.
3910 ASSERT(!has_valid_frame());
3911 // Compile case bodies as needed.
3912 for (int i = 0; i < length; i++) {
3913 CaseClause* clause = cases->at(i);
3914
3915 // There are two ways to reach the body: from the corresponding
3916 // test or as the fall through of the previous body.
3917 if (clause->body_target()->is_linked() || has_valid_frame()) {
3918 if (clause->body_target()->is_linked()) {
3919 if (has_valid_frame()) {
3920 // If we have both a jump to the test and a fall through, put
3921 // a jump on the fall through path to avoid the dropping of
3922 // the switch value on the test path. The exception is the
3923 // default which has already had the switch value dropped.
3924 if (clause->is_default()) {
3925 clause->body_target()->Bind();
3926 } else {
3927 JumpTarget body;
3928 body.Jump();
3929 clause->body_target()->Bind();
3930 frame_->Drop();
3931 body.Bind();
3932 }
3933 } else {
3934 // No fall through to worry about.
3935 clause->body_target()->Bind();
3936 if (!clause->is_default()) {
3937 frame_->Drop();
3938 }
3939 }
3940 } else {
3941 // Otherwise, we have only fall through.
3942 ASSERT(has_valid_frame());
3943 }
3944
3945 // We are now prepared to compile the body.
3946 Comment cmnt(masm_, "[ Case body");
3947 VisitStatements(clause->statements());
3948 }
3949 clause->body_target()->Unuse();
3950 }
3951
3952 // We may not have a valid frame here so bind the break target only
3953 // if needed.
3954 if (node->break_target()->is_linked()) {
3955 node->break_target()->Bind();
3956 }
3957 node->break_target()->Unuse();
3958}
3959
3960
Steve Block3ce2e202009-11-05 08:53:23 +00003961void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003962 ASSERT(!in_spilled_code());
Steve Block3ce2e202009-11-05 08:53:23 +00003963 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00003964 CodeForStatementPosition(node);
3965 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
Steve Block3ce2e202009-11-05 08:53:23 +00003966 JumpTarget body(JumpTarget::BIDIRECTIONAL);
3967 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00003968
Steve Block3ce2e202009-11-05 08:53:23 +00003969 ConditionAnalysis info = AnalyzeCondition(node->cond());
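  // Illustrative note (an assumption about AnalyzeCondition, which is
  // defined elsewhere): a condition that is a literal, as in
  //   do { ... } while (true);   // ALWAYS_TRUE
  //   do { ... } while (false);  // ALWAYS_FALSE
  // can be classified statically; everything else is DONT_KNOW and the
  // test expression is compiled below.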
  // Label the top of the loop for the backward jump if necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // Use the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      // No need to label it.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      break;
    case DONT_KNOW:
      // Continue is the test, so use the backward body target.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      body.Bind();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control flow can fall off the end of the body, jump back
      // to the top and bind the break target at the exit.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case ALWAYS_FALSE:
      // We may have had continues or breaks in the body.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        Comment cmnt(masm_, "[ DoWhileCondition");
        CodeForDoWhileConditionPosition(node);
        ControlDestination dest(&body, node->break_target(), false);
        LoadCondition(node->cond(), &dest, true);
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
  }

  DecrementLoopNesting();
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}


void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WhileStatement");
  CodeForStatementPosition(node);

  // If the condition is always false and has no side effects, we do not
  // need to compile anything.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions. This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
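  // Illustrative example (not from the original source): in
  //   while (f = function() { return g(); }, f()) { ... }
  // the condition contains a function literal, so the test is compiled
  // only once at the top rather than being duplicated at the bottom.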
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop with the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is the test at the bottom, no need to label the test
        // at the top. The body is a backward target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else {
        // Label the test at the top as the continue target. The body
        // is a forward-only target.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // The loop body has been labeled with the continue target.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        // If we have chosen to recompile the test at the bottom,
        // then it is the continue target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here and thus an invalid fall-through).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // If we have chosen not to recompile the test at the bottom,
        // jump back to the one at the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
  ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
  if (slot->type() == Slot::LOCAL) {
    frame_->SetTypeForLocalAt(slot->index(), info);
  } else {
    frame_->SetTypeForParamAt(slot->index(), info);
  }
  if (FLAG_debug_code && info.IsSmi()) {
    if (slot->type() == Slot::LOCAL) {
      frame_->PushLocalAt(slot->index());
    } else {
      frame_->PushParameterAt(slot->index());
    }
    Result var = frame_->Pop();
    var.ToRegister();
    __ AbortIfNotSmi(var.reg());
  }
}


void CodeGenerator::VisitForStatement(ForStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);

  // Compile the init expression if present.
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the condition is always false and has no side effects, we do not
  // need to compile anything else.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions. This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();

  // Target for backward edge if no test at the bottom, otherwise
  // unused.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);

  // Target for backward edge if there is a test at the bottom,
  // otherwise used as target for test at the top.
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop.
      if (node->next() == NULL) {
        // Use the continue target if there is no update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // Otherwise use the backward loop target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is either the update expression or the test at the
        // bottom, no need to label the test at the top.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else if (node->next() == NULL) {
        // We are not recompiling the test at the bottom and there is no
        // update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // We are not recompiling the test at the bottom and there is an
        // update expression.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }

      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition. In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
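  // Illustrative example (not from the original source): in
  //   for (var i = 0; i < 100; i++) { sum += a[i]; }
  // the loop variable i is written only by the update expression and is
  // compared against a constant limit, so it can be typed as a smi here.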
  if (node->is_fast_smi_loop()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
  }

  Visit(node->body());

  // If there is an update expression, compile it if necessary.
  if (node->next() != NULL) {
    if (node->continue_target()->is_linked()) {
      node->continue_target()->Bind();
    }

    // Control can reach the update by falling out of the body or by a
    // continue.
    if (has_valid_frame()) {
      // Record the source position of the statement as this code which
      // is after the code for the body actually belongs to the loop
      // statement and not the body.
      CodeForStatementPosition(node);
      Visit(node->next());
    }
  }

  // Set the type of the loop variable to smi before compiling the test
  // expression if we are in a fast smi loop condition.
  if (node->is_fast_smi_loop() && has_valid_frame()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
  }

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      if (has_valid_frame()) {
        if (node->next() == NULL) {
          node->continue_target()->Jump();
        } else {
          loop.Jump();
        }
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        if (node->continue_target()->is_linked()) {
          // We can have dangling jumps to the continue target if there
          // was no update expression.
          node->continue_target()->Bind();
        }
        // Control can reach the test at the bottom by falling out of
        // the body, by a continue in the body, or from the update
        // expression.
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // Otherwise, jump back to the test at the top.
        if (has_valid_frame()) {
          if (node->next() == NULL) {
            node->continue_target()->Jump();
          } else {
            loop.Jump();
          }
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive;
  JumpTarget jsobject;
  JumpTarget fixed_array;
  JumpTarget entry(JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check;
  JumpTarget exit;

  // Get the object to enumerate over (converted to JSObject).
  LoadAndSpill(node->enumerable());

  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification. 12.6.4 mandates a call to ToObject.
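  // Illustrative example (not from the original source): under this
  // behavior a loop such as
  //   for (var p in null) { ... }
  // simply runs zero iterations instead of throwing from ToObject.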
  frame_->EmitPop(eax);

  // eax: value to be iterated over
  __ cmp(eax, FACTORY->undefined_value());
  exit.Branch(equal);
  __ cmp(eax, FACTORY->null_value());
  exit.Branch(equal);

  // Stack layout in body:
  // [iteration counter (smi)] <- slot 0
  // [length of array] <- slot 1
  // [FixedArray] <- slot 2
  // [Map or 0] <- slot 3
  // [Object] <- slot 4

  // Check if enumerable is already a JSObject
  // eax: value to be iterated over
  __ test(eax, Immediate(kSmiTagMask));
  primitive.Branch(zero);
  __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
  jsobject.Branch(above_equal);

  primitive.Bind();
  frame_->EmitPush(eax);
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
  // function call returns the value in eax, which is where we want it below

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // eax: value to be iterated over
  frame_->EmitPush(eax);  // Push the object being iterated over.

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(ecx, eax);
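  // Added commentary (a summary of the checks below): ecx walks the
  // prototype chain starting at the receiver. The cache may be used only
  // if every object on the chain has no elements and has non-empty
  // instance descriptors with an enum cache, and every object except the
  // receiver itself has an empty enum cache.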
  loop.Bind();
  // Check that there are no elements.
  __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
  __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array()));
  call_runtime.Branch(not_equal);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache. Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
  __ cmp(Operand(edx), Immediate(FACTORY->empty_descriptor_array()));
  call_runtime.Branch(equal);
  // Check that there is an enum cache in the non-empty instance
  // descriptors. This is the case if the next enumeration index
  // field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  call_runtime.Branch(zero);
  // For all objects but the receiver, check that the cache is empty.
  __ cmp(ecx, Operand(eax));
  check_prototype.Branch(equal);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(Operand(edx), Immediate(FACTORY->empty_fixed_array()));
  call_runtime.Branch(not_equal);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(Operand(ecx), Immediate(FACTORY->null_value()));
  loop.Branch(not_equal);
  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(eax);  // push the Object (slot 4) for the runtime call
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // eax: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(edx, Operand(eax));
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, FACTORY->meta_map());
  fixed_array.Branch(not_equal);

  use_cache.Bind();
  // Get enum cache
  // eax: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(ecx, Operand(eax));

  __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
  // Get the bridge array held in the enumeration index field.
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  // Get the cache from the bridge array.
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(eax);  // <- slot 3
  frame_->EmitPush(edx);  // <- slot 2
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
  entry.Jump();

  fixed_array.Bind();
  // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 3
  frame_->EmitPush(eax);  // <- slot 2

  // Push the length of the array and the initial index onto the stack.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0

  // Condition.
  entry.Bind();
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);

  __ mov(eax, frame_->ElementAt(0));  // load the current count
  __ cmp(eax, frame_->ElementAt(1));  // compare to the array length
  node->break_target()->Branch(above_equal);

  // Get the i'th entry of the array.
  __ mov(edx, frame_->ElementAt(2));
  __ mov(ebx, FixedArrayElementOperand(edx, eax));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  __ mov(edx, frame_->ElementAt(3));
  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  // edx: expected map value
  __ mov(ecx, frame_->ElementAt(4));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, Operand(edx));
  end_del_check.Branch(equal);

  // Convert the entry to a string (or null if it isn't a property anymore).
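  // Illustrative example (not from the original source): if the loop body
  // does
  //   delete o.p;
  // the map of the enumerable no longer matches the expected map, so each
  // remaining key is passed through FILTER_KEY, which returns null for
  // keys that are no longer properties; those keys are skipped below.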
  frame_->EmitPush(frame_->ElementAt(4));  // push enumerable
  frame_->EmitPush(ebx);  // push entry
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
  __ mov(ebx, Operand(eax));

  // If the property has been removed while iterating, we just skip it.
  __ test(ebx, Operand(ebx));
  node->continue_target()->Branch(equal);

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop. edx: i'th entry of the enum cache (or string thereof)
  frame_->EmitPush(ebx);
  { Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        // Loading a reference may leave the frame in an unspilled state.
        frame_->SpillAll();
        // Get the value (under the reference on the stack) from memory.
        frame_->EmitPush(frame_->ElementAt(each.size()));
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop(2);
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (eg, ebx pushed above) is
        // right on top of or right underneath a zero-sized reference.
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop();
      }
    }
  }
  // Unloading a reference may leave the frame in an unspilled state.
  frame_->SpillAll();

  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  VisitAndSpill(node->body());

  // Next. Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  frame_->EmitPop(eax);
  __ add(Operand(eax), Immediate(Smi::FromInt(1)));
  frame_->EmitPush(eax);
  entry.Jump();

  // Cleanup. No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();

  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}


void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(eax);

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
  StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  VisitStatementsAndSpill(node->catch_block()->statements());
  if (has_valid_frame()) {
    exit.Jump();
  }


  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
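  // Illustrative example (not from the original source): in
  //   function f() { try { return g(); } catch (e) { return 0; } }
  // the return inside the try block escapes it and must unlink the try
  // handler first, so the function return target is shadowed while the
  // try block is compiled.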
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    masm()->isolate());

  // Make sure that there's nothing left on the stack above the
  // handler structure.
  if (FLAG_debug_code) {
    __ mov(eax, Operand::StaticVariable(handler_address));
    __ cmp(esp, Operand(eax));
    __ Assert(equal, "stack pointer should point to top handler");
  }

  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame. Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing targets that
  // have been jumped to. Deallocate each shadow target.
  Result return_value;
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain; be careful not to destroy the TOS if
      // there is one.
      if (i == kReturnShadowIndex) {
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (eg, for...in) may have left stuff on the stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        if (!function_return_is_shadowed_) frame_->PrepareForReturn();
        shadows[i]->other_target()->Jump(&return_value);
      } else {
        shadows[i]->other_target()->Jump();
      }
    }
  }

  exit.Bind();
}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
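  // Added commentary: FALLING means control fell off the end of the try
  // block, THROWING means the finally block is entered because an
  // exception was thrown, and JUMPING + i marks an escape (e.g. a return
  // or break) through the i'th shadowing target set up below.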

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(eax);
  // In case of thrown exceptions, this is where we continue.
  __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Isolate::k_handler_address,
                                    masm()->isolate());

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in ecx, then jump around the unlink blocks if any.
    frame_->EmitPush(Immediate(FACTORY->undefined_value()));
    __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // on the virtual frame. We must preserve it until it is
      // pushed.
      if (i == kReturnShadowIndex) {
        Result return_value;
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (eg, for...in) may have left stuff on the
      // stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.
      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this target shadowed the function return, materialize
        // the return value on the stack.
        frame_->EmitPush(eax);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        frame_->EmitPush(Immediate(FACTORY->undefined_value()));
      }
      __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(ecx);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatementsAndSpill(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(ecx);
    frame_->EmitPop(eax);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets. Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      BreakTarget* original = shadows[i]->other_target();
      __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
      if (i == kReturnShadowIndex) {
        // The return value is (already) in eax.
        Result return_value = allocator_->Allocate(eax);
        ASSERT(return_value.is_valid());
        if (function_return_is_shadowed_) {
          original->Branch(equal, &return_value);
        } else {
          // Branch around the preparation for return which may emit
          // code.
          JumpTarget skip;
          skip.Branch(not_equal);
          frame_->PrepareForReturn();
          original->Jump(&return_value);
          skip.Bind();
        }
      } else {
        original->Branch(equal);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
    exit.Branch(not_equal);

    // Rethrow exception.
    frame_->EmitPush(eax);  // undo pop from above
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Spill everything, even constants, to the frame.
  frame_->SpillAll();

  frame_->DebugBreak();
  // Ignore the return value.
#endif
}


Result CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info,
    bool pretenure) {
  // The inevitable call will sync frame elements to memory anyway, so
  // we do it eagerly to allow us to push the arguments directly into
  // place.
  frame()->SyncRange(0, frame()->element_count() - 1);

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
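  // Illustrative example (not from the original source): the inner
  // function in
  //   function outer() { return function(a, b) { return a + b; }; }
  // has no literals of its own and is declared in a function scope, so it
  // can be allocated through the stub below instead of the runtime call.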
  if (!pretenure &&
      scope()->is_function_scope() &&
      function_info->num_literals() == 0) {
    FastNewClosureStub stub(
        function_info->strict_mode() ? kStrictMode : kNonStrictMode);
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallStub(&stub, 1);
  } else {
    // Call the runtime to instantiate the function based on the
    // shared function info.
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(function_info));
    frame()->EmitPush(Immediate(pretenure
                                ? FACTORY->true_value()
                                : FACTORY->false_value()));
    return frame()->CallRuntime(Runtime::kNewClosure, 3);
  }
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  Comment cmnt(masm_, "[ FunctionLiteral");
  ASSERT(!in_safe_int32_mode());
  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script());
  // Check for stack-overflow exception.
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  Result result = InstantiateFunction(function_info, node->pretenure());
  frame()->Push(&result);
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  Result result = InstantiateFunction(node->shared_function_info(), false);
  frame()->Push(&result);
}


void CodeGenerator::VisitConditional(Conditional* node) {
  Comment cmnt(masm_, "[ Conditional");
  ASSERT(!in_safe_int32_mode());
  JumpTarget then;
  JumpTarget else_;
  JumpTarget exit;
  ControlDestination dest(&then, &else_, true);
  LoadCondition(node->condition(), &dest, true);

  if (dest.false_was_fall_through()) {
    // The else target was bound, so we compile the else part first.
    Load(node->else_expression());

    if (then.is_linked()) {
      exit.Jump();
      then.Bind();
      Load(node->then_expression());
    }
  } else {
    // The then target was bound, so we compile the then part first.
    Load(node->then_expression());

    if (else_.is_linked()) {
      exit.Jump();
      else_.Bind();
      Load(node->else_expression());
    }
  }

  exit.Bind();
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());
    JumpTarget slow;
    JumpTarget done;
    Result value;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &value,
                                    &slow,
                                    &done);

    slow.Bind();
    // A runtime call is inevitable. We eagerly sync frame elements
    // to memory so that we can push the arguments directly into place
    // on top of the frame.
    frame()->SyncRange(0, frame()->element_count() - 1);
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(slot->var()->name()));
    if (typeof_state == INSIDE_TYPEOF) {
      value =
          frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      value = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind(&value);
    frame_->Push(&value);

  } else if (slot->var()->mode() == Variable::CONST) {
    // Const slots may contain 'the hole' value (the constant hasn't been
    // initialized yet) which needs to be converted into the 'undefined'
    // value.
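    // Illustrative example (not from the original source): in
    //   f(x); const x = 1;
    // the read of x can execute before the const initialization has run,
    // so the slot still holds the hole and 'undefined' is pushed instead.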
    //
    // We currently spill the virtual frame because constants use the
    // potentially unsafe direct-frame access of SlotOperand.
    VirtualFrame::SpilledScope spilled_scope;
    Comment cmnt(masm_, "[ Load const");
    Label exit;
    __ mov(ecx, SlotOperand(slot, ecx));
    __ cmp(ecx, FACTORY->the_hole_value());
    __ j(not_equal, &exit);
    __ mov(ecx, FACTORY->undefined_value());
    __ bind(&exit);
    frame()->EmitPush(ecx);

  } else if (slot->type() == Slot::PARAMETER) {
    frame()->PushParameterAt(slot->index());

  } else if (slot->type() == Slot::LOCAL) {
    frame()->PushLocalAt(slot->index());

  } else {
    // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
    // here.
    //
    // The use of SlotOperand below is safe for an unspilled frame
    // because it will always be a context slot.
    ASSERT(slot->type() == Slot::CONTEXT);
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
    frame()->Push(&temp);
  }
}


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // If the loaded value is a constant, we know if the arguments
  // object has been lazily loaded yet.
  Result result = frame()->Pop();
  if (result.is_constant()) {
    if (result.handle()->IsArgumentsMarker()) {
      result = StoreArgumentsObject(false);
    }
    frame()->Push(&result);
    return;
  }
  ASSERT(result.is_register());
  // The loaded value is in a register. If it is the sentinel that
  // indicates that we haven't loaded the arguments object yet, we
  // need to do it now.
  JumpTarget exit;
  __ cmp(Operand(result.reg()), Immediate(FACTORY->arguments_marker()));
  frame()->Push(&result);
  exit.Branch(not_equal);

  result = StoreArgumentsObject(false);
  frame()->SetElementAt(0, &result);
  result.Unuse();
  exit.Bind();
  return;
}


Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    JumpTarget* slow) {
  ASSERT(!in_safe_int32_mode());
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
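  // Illustrative example (not from the original source): in
  //   function f() { eval("var x = 1;"); return x; }
  // the eval call may introduce bindings through a context extension
  // object, so a load resolved to the global object is valid only if all
  // extension slots on the chain below are NULL.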
5121 Register context = esi;
5122 Result tmp = allocator_->Allocate();
5123 ASSERT(tmp.is_valid()); // All non-reserved registers were available.
5124
5125 Scope* s = scope();
5126 while (s != NULL) {
5127 if (s->num_heap_slots() > 0) {
5128 if (s->calls_eval()) {
5129 // Check that extension is NULL.
5130 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
5131 Immediate(0));
5132 slow->Branch(not_equal, not_taken);
5133 }
5134 // Load next context in chain.
5135 __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
5136 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5137 context = tmp.reg();
5138 }
5139 // If no outer scope calls eval, we do not need to check more
5140 // context extensions. If we have reached an eval scope, we check
5141 // all extensions from this point.
5142 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
5143 s = s->outer_scope();
5144 }
5145
5146 if (s != NULL && s->is_eval_scope()) {
5147 // Loop up the context chain. There is no frame effect so it is
5148 // safe to use raw labels here.
5149 Label next, fast;
5150 if (!context.is(tmp.reg())) {
5151 __ mov(tmp.reg(), context);
5152 }
5153 __ bind(&next);
5154 // Terminate at global context.
5155 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01005156 Immediate(FACTORY->global_context_map()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005157 __ j(equal, &fast);
5158 // Check that extension is NULL.
5159 __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
5160 slow->Branch(not_equal, not_taken);
5161 // Load next context in chain.
5162 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
5163 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5164 __ jmp(&next);
5165 __ bind(&fast);
5166 }
5167 tmp.Unuse();
5168
5169 // All extension objects were empty and it is safe to use a global
5170 // load IC call.
Andrei Popescu402d9372010-02-26 13:31:12 +00005171 // The register allocator prefers eax if it is free, so the code generator
5172 // will load the global object directly into eax, which is where the LoadIC
5173 // expects it.
5174 frame_->Spill(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00005175 LoadGlobal();
5176 frame_->Push(slot->var()->name());
5177 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
5178 ? RelocInfo::CODE_TARGET
5179 : RelocInfo::CODE_TARGET_CONTEXT;
5180 Result answer = frame_->CallLoadIC(mode);
5181 // A test eax instruction following the call signals that the inobject
5182 // property case was inlined. Ensure that there is not a test eax
5183 // instruction here.
5184 __ nop();
Steve Blocka7e24c12009-10-30 11:49:00 +00005185 return answer;
5186}


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    Result* result,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    done->Jump(result);

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      // Allocate a fresh register to use as a temp in
      // ContextSlotOperandCheckExtensions and to hold the result
      // value.
      *result = allocator()->Allocate();
      ASSERT(result->is_valid());
      __ mov(result->reg(),
             ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ cmp(result->reg(), FACTORY->the_hole_value());
        done->Branch(not_equal, result);
        __ mov(result->reg(), FACTORY->undefined_value());
      }
      done->Jump(result);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          Result arguments = allocator()->Allocate();
          ASSERT(arguments.is_valid());
          __ mov(arguments.reg(),
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                   arguments,
                                                   slow));
          frame_->Push(&arguments);
          frame_->Push(key_literal->handle());
          *result = EmitKeyedLoad();
          done->Jump(result);
        }
      }
    }
  }
}


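// Store the value on top of the expression stack into the given slot,
// leaving the (new) value on top of the frame afterwards so that chained
// assignments keep compiling. LOOKUP slots go through a runtime call;
// parameter, local, and context slots are stored directly.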
void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call. Since the call is inevitable,
    // we eagerly sync the virtual frame so we can directly push the
    // arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);

    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(slot->var()->name()));

    Result value;
    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores the attributes
      // (e.g. READ_ONLY) of the context slot so that we can initialize
      // const properties (introduced via eval("const foo = (some expr);")).
      // Also, uses the current function context instead of the top context.
      //
      // Note that we must declare the variable upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the same
      // time, because the const declaration may be at the end of the eval
      // code (sigh...) and the const variable may have been used before
      // (where its value is 'undefined'). Thus, we can only do the
      // initialization when we actually encounter the expression and when
      // the expression operands are defined and valid, and thus we need the
      // split into two operations: declaration of the context slot followed
      // by initialization.
      value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      frame_->Push(Smi::FromInt(strict_mode_flag()));
      value = frame_->CallRuntime(Runtime::kStoreContextSlot, 4);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack. This is necessary for compiling chained assignment
    // expressions.
    frame_->Push(&value);

  } else {
    ASSERT(!slot->var()->is_dynamic());

    JumpTarget exit;
    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value). When the assignment is executed,
      // the code is identical to a normal store (see below).
      //
      // We spill the frame in the code below because the direct-frame
      // access of SlotOperand is potentially unsafe with an unspilled
      // frame.
      VirtualFrame::SpilledScope spilled_scope;
      Comment cmnt(masm_, "[ Init const");
      __ mov(ecx, SlotOperand(slot, ecx));
      __ cmp(ecx, FACTORY->the_hole_value());
      exit.Branch(not_equal);
    }

    // We must execute the store. Storing a variable must keep the (new)
    // value on the stack. This is necessary for compiling assignment
    // expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will initialize
    // consts to 'the hole' value and by doing so, end up calling this code.
    if (slot->type() == Slot::PARAMETER) {
      frame_->StoreToParameterAt(slot->index());
    } else if (slot->type() == Slot::LOCAL) {
      frame_->StoreToLocalAt(slot->index());
    } else {
      // The other slot types (LOOKUP and GLOBAL) cannot reach here.
      //
      // The use of SlotOperand below is safe for an unspilled frame
      // because the slot is a context slot.
      ASSERT(slot->type() == Slot::CONTEXT);
      frame_->Dup();
      Result value = frame_->Pop();
      value.ToRegister();
      Result start = allocator_->Allocate();
      ASSERT(start.is_valid());
      __ mov(SlotOperand(slot, start.reg()), value.reg());
      // RecordWrite may destroy the value registers.
      //
      // TODO(204): Avoid actually spilling when the value is not
      // needed (probably the common case).
      frame_->Spill(value.reg());
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
      // The results start, value, and temp are unused by going out of
      // scope.
    }

    exit.Bind();
  }
}


void CodeGenerator::VisitSlot(Slot* slot) {
  Comment cmnt(masm_, "[ Slot");
  if (in_safe_int32_mode()) {
    if (slot->type() == Slot::LOCAL && !slot->is_arguments()) {
      frame()->UntaggedPushLocalAt(slot->index());
    } else if (slot->type() == Slot::PARAMETER) {
      frame()->UntaggedPushParameterAt(slot->index());
    } else {
      UNREACHABLE();
    }
  } else {
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
  }
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
  Comment cmnt(masm_, "[ VariableProxy");
  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    ASSERT(!in_safe_int32_mode());
    Reference ref(this, node);
    ref.GetValue();
  }
}


void CodeGenerator::VisitLiteral(Literal* node) {
  Comment cmnt(masm_, "[ Literal");
  if (frame_->ConstantPoolOverflowed()) {
    Result temp = allocator_->Allocate();
    ASSERT(temp.is_valid());
    if (in_safe_int32_mode()) {
      temp.set_untagged_int32(true);
    }
    __ Set(temp.reg(), Immediate(node->handle()));
    frame_->Push(&temp);
  } else {
    if (in_safe_int32_mode()) {
      frame_->PushUntaggedElement(node->handle());
    } else {
      frame_->Push(node->handle());
    }
  }
}


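// The "unsafe smi" helpers below keep large smi constants from appearing
// verbatim as immediates in the generated instruction stream (a
// JIT-spraying mitigation): each constant is emitted XOR'ed with the
// per-compilation jit_cookie_ and then decoded in place. For a cookie c
// and smi bits b, PushUnsafeSmi effectively emits:
//
//   push (b ^ c)    ; the raw bits b never appear in the code stream
//   xor [esp], c    ; restores b in the pushed stack slot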
void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ push(Immediate(bits ^ jit_cookie_));
  __ xor_(Operand(esp, 0), Immediate(jit_cookie_));
}


void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ mov(Operand(ebp, offset), Immediate(bits ^ jit_cookie_));
  __ xor_(Operand(ebp, offset), Immediate(jit_cookie_));
}


void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
  ASSERT(target.is_valid());
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ Set(target, Immediate(bits ^ jit_cookie_));
  __ xor_(target, jit_cookie_);
}


bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
  if (!value->IsSmi()) return false;
  int int_value = Smi::cast(*value)->value();
  return !is_intn(int_value, kMaxSmiInlinedBits);
}


// Materialize the regexp literal 'node' in the literals array
// 'literals' of the function. Leave the regexp boilerplate in
// 'boilerplate'.
class DeferredRegExpLiteral: public DeferredCode {
 public:
  DeferredRegExpLiteral(Register boilerplate,
                        Register literals,
                        RegExpLiteral* node)
      : boilerplate_(boilerplate), literals_(literals), node_(node) {
    set_comment("[ DeferredRegExpLiteral");
  }

  void Generate();

 private:
  Register boilerplate_;
  Register literals_;
  RegExpLiteral* node_;
};


void DeferredRegExpLiteral::Generate() {
  // Since the entry is undefined we call the runtime system to
  // compute the literal.
  // Literal array (0).
  __ push(literals_);
  // Literal index (1).
  __ push(Immediate(Smi::FromInt(node_->literal_index())));
  // RegExp pattern (2).
  __ push(Immediate(node_->pattern()));
  // RegExp flags (3).
  __ push(Immediate(node_->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
}


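// Deferred (slow-path) code that allocates an object of the given size in
// new space via a runtime call. It backs inline AllocateInNewSpace sites;
// registers named in 'registers_to_save' are preserved around the call and
// the allocated object is left in 'target'.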
class DeferredAllocateInNewSpace: public DeferredCode {
 public:
  DeferredAllocateInNewSpace(int size,
                             Register target,
                             int registers_to_save = 0)
      : size_(size), target_(target), registers_to_save_(registers_to_save) {
    ASSERT(size >= kPointerSize && size <= HEAP->MaxObjectSizeInNewSpace());
    ASSERT_EQ(0, registers_to_save & target.bit());
    set_comment("[ DeferredAllocateInNewSpace");
  }
  void Generate();

 private:
  int size_;
  Register target_;
  int registers_to_save_;
};


void DeferredAllocateInNewSpace::Generate() {
  for (int i = 0; i < kNumRegs; i++) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ push(save_register);
    }
  }
  __ push(Immediate(Smi::FromInt(size_)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  if (!target_.is(eax)) {
    __ mov(target_, eax);
  }
  for (int i = kNumRegs - 1; i >= 0; i--) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ pop(save_register);
    }
  }
}


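// RegExp literals are materialized lazily: the first evaluation builds a
// boilerplate JSRegExp via the runtime and caches it in the function's
// literals array; after that, every evaluation makes a shallow copy of the
// boilerplate so each evaluation of the literal yields a fresh object.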
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literals array and check the allocated entry. Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the RegExp object. If so,
  // jump to the deferred code passing the literals array.
  DeferredRegExpLiteral* deferred =
      new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
  __ cmp(boilerplate.reg(), FACTORY->undefined_value());
  deferred->Branch(equal);
  deferred->BindExit();

  // The boilerplate register now contains the RegExp object.

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;

  DeferredAllocateInNewSpace* allocate_fallback =
      new DeferredAllocateInNewSpace(size, literals.reg());
  frame_->Push(&boilerplate);
  frame_->SpillTop();
  __ AllocateInNewSpace(size,
                        literals.reg(),
                        tmp.reg(),
                        no_reg,
                        allocate_fallback->entry_label(),
                        TAG_OBJECT);
  allocate_fallback->BindExit();
  boilerplate = frame_->Pop();
  // Copy from boilerplate to clone and return clone.
  for (int i = 0; i < size; i += kPointerSize) {
    __ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
    __ mov(FieldOperand(literals.reg(), i), tmp.reg());
  }
  frame_->Push(&literals);
}


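// Materialize an object literal, e.g. JavaScript '{ x: 1, y: f() }': the
// boilerplate is created (or found) by a runtime call, cloned, and then
// each property that is not a compile-time constant is stored into the
// clone.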
void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ObjectLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant properties.
  frame_->Push(node->constant_properties());
  // Should the object literal have fast elements?
  frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->Push(&clone);

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  node->CalculateEmitStore();

  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        Handle<Object> key(property->key()->handle());
        if (key->IsSymbol()) {
          // Duplicate the object as the IC receiver.
          frame_->Dup();
          Load(property->value());
          if (property->emit_store()) {
            Result ignored =
                frame_->CallStoreIC(Handle<String>::cast(key), false,
                                    strict_mode_flag());
            // A test eax instruction following the store IC call would
            // indicate the presence of an inlined version of the
            // store. Add a nop to indicate that there is no such
            // inlined version.
            __ nop();
          } else {
            frame_->Drop(2);
          }
          break;
        }
        // Fall through.
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        if (property->emit_store()) {
          frame_->Push(Smi::FromInt(NONE));  // PropertyAttributes
          // Ignore the result.
          Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 4);
        } else {
          frame_->Drop(3);
        }
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}


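// Materialize an array literal, e.g. JavaScript '[1, 2, f()]'. Shallow
// literals are cloned by FastCloneShallowArrayStub; literals whose elements
// are a copy-on-write fixed array share the backing store instead of
// copying it, and deep or oversized literals fall back to the runtime.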
void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ArrayLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  frame_->Push(&literals);
  frame_->Push(Smi::FromInt(node->literal_index()));
  frame_->Push(node->constant_elements());
  int length = node->values()->length();
  Result clone;
  if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
    Counters* counters = masm()->isolate()->counters();
    __ IncrementCounter(counters->cow_arrays_created_stub(), 1);
  } else if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
  }
  frame_->Push(&clone);

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < length; i++) {
    Expression* value = node->values()->at(i);

    if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) {
      continue;
    }

    // The property must be set by generated code.
    Load(value);

    // Get the property value off the stack.
    Result prop_value = frame_->Pop();
    prop_value.ToRegister();

    // Fetch the array literal while leaving a copy on the stack and
    // use it to get the elements array.
    frame_->Dup();
    Result elements = frame_->Pop();
    elements.ToRegister();
    frame_->Spill(elements.reg());
    // Get the elements array.
    __ mov(elements.reg(),
           FieldOperand(elements.reg(), JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());

    // Update the write barrier for the array address.
    frame_->Spill(prop_value.reg());  // Overwritten by the write barrier.
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_valid());
    __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
  }
}


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
  ASSERT(!in_safe_int32_mode());
  ASSERT(!in_spilled_code());
  // Call runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  Result result =
      frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->Push(&result);
}


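// Assignment to a variable that lives in a slot (parameter, local, or
// context slot), e.g. JavaScript 'x = y' or 'x += y' for a non-global x.
// Compound assignments load the current value, combine it with the
// right-hand side, and store the result back into the slot.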
void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->AsSlot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    // Construct the implicit binary operation.
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));
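
  // ---------------------------------------------------------------------
  // JavaScript examples: 'obj.x = y' and, when var != NULL, 'x = y' where
  // x is a global variable (handled here as a named property of the
  // global object).
  // ---------------------------------------------------------------------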

  // Initialize name and evaluate the receiver sub-expression if necessary.
  // If the receiver is trivial it is not placed on the stack at this point,
  // but loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case at the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so it is never an assignment to a variable and there
    // must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      frame()->Dup();
    }
    Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare
  // for that add an extra copy of the receiver to the frame, so that it
  // can be converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame()->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else if (var != NULL) {
      // The LoadIC stub expects the object in eax.
      // Freeing eax causes the code generator to load the global into it.
      frame_->Spill(eax);
      LoadGlobal();
    } else {
      frame()->Dup();
    }
    Result value = EmitNamedLoad(name, var != NULL);
    frame()->Push(&value);
    Load(node->value());

    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    // Construct the implicit binary operation.
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    Result value = frame()->Pop();
    frame()->Push(prop->obj());
    frame()->Push(&value);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  Result answer = EmitNamedStore(name, is_contextual);
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Result result = frame()->Pop();
      Result receiver = frame()->Pop();
      frame()->Push(&result);
      frame()->Push(&receiver);
    }
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(frame()->height(), original_height + 1);
}


void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);
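
  // -------------------------------------------
  // JavaScript example: 'obj[key] = value'
  // -------------------------------------------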

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  // Change to slow case at the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare
  // for that add an extra copy of the receiver to the frame, so that it
  // can be converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame()->PushElementAt(1);
    frame()->PushElementAt(1);
    Result value = EmitKeyedLoad();
    frame()->Push(&value);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  Result answer = EmitKeyedStore(prop->key()->type());
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Result result = frame()->Pop();
    Result receiver = frame()->Pop();
    frame()->Push(&result);
    frame()->Push(&receiver);
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitAssignment(Assignment* node) {
  ASSERT(!in_safe_int32_mode());
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame()->Push(&result);
  }

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitThrow(Throw* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Throw");
  Load(node->exception());
  Result result = frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->Push(&result);
}


void CodeGenerator::VisitProperty(Property* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Property");
  Reference property(this, node);
  property.GetValue();
}


void CodeGenerator::VisitCall(Call* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.

    // Prepare the stack for the call to the resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->Push(FACTORY->undefined_value());

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Result to hold the result of the function resolution and the
    // final result of the eval call.
    Result result;

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code. If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
                                                     NOT_INSIDE_TYPEOF,
                                                     &slow);
      frame_->Push(&fun);
      if (arg_count > 0) {
        frame_->PushElementAt(arg_count);
      } else {
        frame_->Push(FACTORY->undefined_value());
      }
      frame_->PushParameterAt(-1);

      // Push the strict mode flag.
      frame_->Push(Smi::FromInt(strict_mode_flag()));

      // Resolve the call.
      result =
          frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4);

      done.Jump(&result);
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    frame_->PushElementAt(arg_count + 1);
    if (arg_count > 0) {
      frame_->PushElementAt(arg_count);
    } else {
      frame_->Push(FACTORY->undefined_value());
    }
    frame_->PushParameterAt(-1);

    // Push the strict mode flag.
    frame_->Push(Smi::FromInt(strict_mode_flag()));

    // Resolve the call.
    result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind(&result);

    // The runtime call returns a pair of values in eax (function) and
    // edx (receiver). Touch up the stack with the right values.
    Result receiver = allocator_->Allocate(edx);
    frame_->SetElementAt(arg_count + 1, &result);
    frame_->SetElementAt(arg_count, &receiver);
    receiver.Unuse();

    // Call the function.
    CodeForSourcePosition(node->position());
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    result = frame_->CallStub(&call_function, arg_count + 1);

    // Restore the context and overwrite the function on the stack with
    // the result.
    frame_->RestoreContextRegister();
    frame_->SetElementAt(0, &result);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------

    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Push the name of the function onto the frame.
    frame_->Push(var->name());

    // Call the IC initialization code.
    CodeForSourcePosition(node->position());
    Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
                                       arg_count,
                                       loop_nesting());
    frame_->RestoreContextRegister();
    frame_->Push(&result);

  } else if (var != NULL && var->AsSlot() != NULL &&
             var->AsSlot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;
    Result function;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
                                    NOT_INSIDE_TYPEOF,
                                    &function,
                                    &slow,
                                    &done);

    slow.Bind();
    // Enter the runtime system to load the function from the context.
    // Sync the frame so we can push the arguments directly into
    // place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // The runtime call returns a pair of values in eax and edx. The
    // looked-up function is in eax and the receiver is in edx. These
    // register references are not ref counted here. We spill them
    // eagerly since they are arguments to an inevitable call (and are
    // not sharable by the arguments).
    ASSERT(!allocator()->is_used(eax));
    frame_->EmitPush(eax);

    // Load the receiver.
    ASSERT(!allocator()->is_used(edx));
    frame_->EmitPush(edx);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind(&function);
      frame_->Push(&function);
      LoadGlobalReceiver();
      call.Bind();
    }

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Push the name of the function onto the frame.
        frame_->Push(name);

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
                               loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the function to call from the property through a reference.

      // Pass receiver to called function.
      if (property->is_synthetic()) {
        Reference ref(this, property);
        ref.GetValue();
        // Use global object as receiver.
        LoadGlobalReceiver();
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the name of the function.
        Load(property->key());

        // Swap the name of the function and the receiver on the stack to
        // follow the calling convention for call ICs.
        Result key = frame_->Pop();
        Result receiver = frame_->Pop();
        frame_->Push(&key);
        frame_->Push(&receiver);
        key.Unuse();
        receiver.Unuse();

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Place the key on top of stack and call the IC initialization code.
        frame_->PushElementAt(arg_count + 1);
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET,
                                    arg_count,
                                    loop_nesting());
        frame_->Drop();  // Drop the key still on the stack.
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver();

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
  }
}


void CodeGenerator::VisitCallNew(CallNew* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CallNew");
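  // ----------------------------------
  // JavaScript example: 'new Foo(1, 2, 3)'
  // ----------------------------------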

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  Load(node->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Result result = frame_->CallConstructor(arg_count);
  frame_->Push(&result);
}


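// %_IsSmi(value): test the smi tag. On ia32 a smi is the 31-bit value
// shifted left by one with a 0 tag in the least significant bit (e.g. 5 is
// encoded as binary 1010), so 'test reg, kSmiTagMask' sets the zero flag
// exactly when the value is a smi.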
void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  value.Unuse();
  destination()->Split(zero);
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  frame_->Push(FACTORY->undefined_value());
}


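// %_IsNonNegativeSmi(value): like %_IsSmi, but the sign bit must also be
// clear, so testing against (kSmiTagMask | kSmiSignMask) accepts only
// non-negative smis.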
6455void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
6456 ASSERT(args->length() == 1);
6457 Load(args->at(0));
6458 Result value = frame_->Pop();
6459 value.ToRegister();
6460 ASSERT(value.is_valid());
Steve Block6ded16b2010-05-10 14:33:55 +01006461 __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006462 value.Unuse();
6463 destination()->Split(zero);
6464}
6465
6466
class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ Set(result_, Immediate(FACTORY->undefined_value()));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ Set(result_, Immediate(FACTORY->nan_value()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};


// This generates code that performs a String.prototype.charCodeAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need two extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch = allocator()->Allocate();
  ASSERT(scratch.is_valid());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object.reg(),
                                   index.reg(),
                                   scratch.reg(),
                                   result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Result code = frame_->Pop();
  code.ToRegister();
  ASSERT(code.is_valid());

  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code.reg(), result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ Set(result_, Immediate(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ Set(result_, Immediate(FACTORY->empty_string()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need three extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch1 = allocator()->Allocate();
  ASSERT(scratch1.is_valid());
  Result scratch2 = allocator()->Allocate();
  ASSERT(scratch2.is_valid());

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object.reg(),
                               index.reg(),
                               scratch1.reg(),
                               scratch2.reg(),
                               result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a JS array or not.
  __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


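// Overview (illustrative pseudo-code, informal names) of the fast path
// emitted by GenerateFastAsciiArrayJoin below; any failed check stores
// undefined, which signals the caller to fall back to the generic join:
//
//   if (!IsJSArray(array) || !HasFastElements(array)) return undefined;
//   n = array.length;
//   if (n == 0) return "";
//   total = 0;
//   for (i = 0; i < n; i++) {
//     if (!IsSeqAsciiString(elements[i])) return undefined;
//     total += elements[i].length;                 // overflow => undefined
//   }
//   if (n == 1) return elements[0];
//   if (!IsSeqAsciiString(sep)) return undefined;
//   total += sep.length * (n - 1);                 // overflow => undefined
//   result = AllocateAsciiString(total);
//   copy strings (and separators) into result;     // three copy loops
//   return result;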
void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop, loop_condition,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  Load(args->at(1));
  // Load this to eax (= array)
  Load(args->at(0));
  Result array_result = frame_->Pop();
  array_result.ToRegister(eax);
  frame_->SpillAll();

  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(Operand(esp), Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray
  __ test(array, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ test_b(FieldOperand(scratch, Map::kBitField2Offset),
            1 << Map::kHasFastElements);
  __ j(zero, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ sar(array_length, 1);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, FACTORY->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, Immediate(0));
  __ Set(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  // scratch, string_length, elements.
  __ jmp(&loop_condition);
  __ bind(&loop);
  __ cmp(index, Operand(array_length));
  __ j(greater_equal, &done);

  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ test(string, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqAsciiString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(Operand(index), Immediate(1));
  __ bind(&loop_condition);
  __ cmp(index, Operand(array_length));
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  //   string_length: Sum of string lengths, as a smi.
  //   elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ test(string, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
  __ sub(string_length, Operand(scratch));  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, Operand(scratch));
  __ j(overflow, &bailout);
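
  // Worked example (illustrative): joining n == 3 strings with a two-char
  // separator needs n - 1 == 2 separators, i.e. 4 extra characters. The
  // code above computes that as sep_len * n - sep_len (= 2 * 3 - 2 = 4),
  // subtracting first so that only the imul and the final add need overflow
  // checks; all quantities are smis, so the checks are on tagged values.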

  __ shr(string_length, 1);
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));


  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);


  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its ascii character value.
  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  __ bind(&bailout);
  __ mov(result_operand, FACTORY->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(Operand(esp), Immediate(2 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  frame_->Drop(1);
  frame_->Push(&array_result);
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a regexp.
  __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();

  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  __ cmp(obj.reg(), FACTORY->null_value());
  destination()->true_target()->Branch(equal);

  Result map = allocator()->Allocate();
  ASSERT(map.is_valid());
  __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  destination()->false_target()->Branch(not_zero);
  // Do a range test for JSObject type.  We can't use
  // MacroAssembler::IsInstanceJSObjectType, because we are using a
  // ControlDestination, so we copy its implementation here.
  __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
  __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
  __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
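  // The sub/cmp pair above is the classic unsigned range check: subtracting
  // FIRST_JS_OBJECT_TYPE makes any type below the range wrap around to a
  // large unsigned value, so a single unsigned below_equal comparison
  // against LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE tests
  // FIRST <= type <= LAST in one branch (e.g., with hypothetical values
  // FIRST == 0xa0, a type of 0x9f wraps to 0xff and correctly fails).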
  obj.Unuse();
  map.Unuse();
  destination()->Split(below_equal);
}


void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
  //  typeof(arg) == 'function').
  // It includes undetectable objects (as opposed to IsObject).
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);

  // Check that this is an object.
  frame_->Spill(value.reg());
  __ CmpObjectType(value.reg(), FIRST_JS_OBJECT_TYPE, value.reg());
  value.Unuse();
  destination()->Split(above_equal);
}

// Deferred code to check whether a String wrapper object is safe to use the
// default valueOf behaviour for. It runs after the inline code has tested
// the bit caching this information in the map of the object held in the
// map_result_ register. On return the register map_result_ contains 1 for
// true and 0 for false.
class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
 public:
  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
                                               Register map_result,
                                               Register scratch1,
                                               Register scratch2)
      : object_(object),
        map_result_(map_result),
        scratch1_(scratch1),
        scratch2_(scratch2) { }

  virtual void Generate() {
    Label false_result;

    // Check that map is loaded as expected.
    if (FLAG_debug_code) {
      __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
      __ Assert(equal, "Map not in expected register");
    }

    // Check for fast case object. Generate false result for slow case object.
    __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
    __ cmp(scratch1_, FACTORY->hash_table_map());
    __ j(equal, &false_result);

    // Look for valueOf symbol in the descriptor array, and indicate false if
    // found. The type is not checked, so if it is a transition it is a false
    // negative.
    __ mov(map_result_,
           FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
    __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
    // map_result_: descriptor array
    // scratch1_: length of descriptor array
    // Calculate the end of the descriptor array.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    STATIC_ASSERT(kPointerSize == 4);
    __ lea(scratch1_,
           Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
    // Calculate location of the first key name.
    __ add(Operand(map_result_),
           Immediate(FixedArray::kHeaderSize +
                     DescriptorArray::kFirstIndex * kPointerSize));
    // Loop through all the keys in the descriptor array. If one of these is
    // the symbol valueOf the result is false.
    Label entry, loop;
    __ jmp(&entry);
    __ bind(&loop);
    __ mov(scratch2_, FieldOperand(map_result_, 0));
    __ cmp(scratch2_, FACTORY->value_of_symbol());
    __ j(equal, &false_result);
    __ add(Operand(map_result_), Immediate(kPointerSize));
    __ bind(&entry);
    __ cmp(map_result_, Operand(scratch1_));
    __ j(not_equal, &loop);

    // Reload map as register map_result_ was used as temporary above.
    __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));

    // If a valueOf property is not found on the object, check that its
    // prototype is the unmodified String prototype. If not, the result is
    // false.
    __ mov(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
    __ test(scratch1_, Immediate(kSmiTagMask));
    __ j(zero, &false_result);
    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
    __ mov(scratch2_, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    __ mov(scratch2_,
           FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
    __ cmp(scratch1_,
           ContextOperand(scratch2_,
                          Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
    __ j(not_equal, &false_result);
    // Set the bit in the map to indicate that it has been checked safe for
    // default valueOf and set true result.
    __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
    __ Set(map_result_, Immediate(1));
    __ jmp(exit_label());
    __ bind(&false_result);
    // Set false result.
    __ Set(map_result_, Immediate(0));
  }

 private:
  Register object_;
  Register map_result_;
  Register scratch1_;
  Register scratch2_;
};


void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();  // Pop the string wrapper.
  obj.ToRegister();
  ASSERT(obj.is_valid());
  if (FLAG_debug_code) {
    __ AbortIfSmi(obj.reg());
  }

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  Result map_result = allocator()->Allocate();
  ASSERT(map_result.is_valid());
  __ mov(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ test_b(FieldOperand(map_result.reg(), Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  destination()->true_target()->Branch(not_zero);

  // We need an additional two scratch registers for the deferred code.
  Result temp1 = allocator()->Allocate();
  ASSERT(temp1.is_valid());
  Result temp2 = allocator()->Allocate();
  ASSERT(temp2.is_valid());

  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
      new DeferredIsStringWrapperSafeForDefaultValueOf(
          obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
  deferred->Branch(zero);
  deferred->BindExit();
  __ test(map_result.reg(), Operand(map_result.reg()));
  obj.Unuse();
  map_result.Unuse();
  temp1.Unuse();
  temp2.Unuse();
  destination()->Split(not_equal);
}


void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (%_ClassOf(arg) === 'Function')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
  obj.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ mov(temp.reg(),
         FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  obj.Unuse();
  temp.Unuse();
  destination()->Split(not_zero);
}


void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  // Get the frame pointer for the calling frame.
  Result fp = allocator()->Allocate();
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  fp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Result fp = allocator_->Allocate();
  Result result = allocator_->Allocate();
  ASSERT(fp.is_valid() && result.is_valid());

  Label exit;

  // Get the number of formal parameters.
  __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(result.reg(),
         Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  result.set_type_info(TypeInfo::Smi());
  if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
  frame_->Push(&result);
}


void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  JumpTarget leave, null, function, non_function_constructor;
  Load(args->at(0));  // Load the object.
  Result obj = frame_->Pop();
  obj.ToRegister();
  frame_->Spill(obj.reg());

  // If the object is a smi, we return null.
  __ test(obj.reg(), Immediate(kSmiTagMask));
  null.Branch(zero);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
  null.Branch(below);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
  function.Branch(equal);

  // Check if the constructor in the map is a function.
  { Result tmp = allocator()->Allocate();
    __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
    __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
    non_function_constructor.Branch(not_equal);
  }

  // The map register now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->Push(&obj);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  frame_->Push(FACTORY->function_class_symbol());
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  frame_->Push(FACTORY->Object_symbol());
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  frame_->Push(FACTORY->null_value());

  // All done.
  leave.Bind();
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  frame_->Dup();
  Result object = frame_->Pop();
  object.ToRegister();
  ASSERT(object.is_valid());
  // if (object->IsSmi()) return object.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, taken);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // if (!object->IsJSValue()) return object.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
  leave.Branch(not_equal, not_taken);
  __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
  object.Unuse();
  frame_->SetElementAt(0, &temp);
  leave.Bind();
}


void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  Load(args->at(1));  // Load the value.
  Result value = frame_->Pop();
  Result object = frame_->Pop();
  value.ToRegister();
  object.ToRegister();

  // if (object->IsSmi()) return value.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, &value, taken);

  // It is a heap object - get its map.
  Result scratch = allocator_->Allocate();
  ASSERT(scratch.is_valid());
  // if (!object->IsJSValue()) return value.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
  leave.Branch(not_equal, &value, not_taken);

  // Store the value.
  __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  Result duplicate_value = allocator_->Allocate();
  ASSERT(duplicate_value.is_valid());
  __ mov(duplicate_value.reg(), value.reg());
  // The object register is also overwritten by the write barrier and
  // possibly aliased in the frame.
  frame_->Spill(object.reg());
  __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
                 scratch.reg());
  object.Unuse();
  scratch.Unuse();
  duplicate_value.Unuse();

  // Leave.
  leave.Bind(&value);
  frame_->Push(&value);
}


void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  Load(args->at(0));
  Result key = frame_->Pop();
  // Explicitly create a constant result.
  Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  Result result = frame_->CallStub(&stub, &key, &count);
  frame_->Push(&result);
}


void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Result right = frame_->Pop();
  Result left = frame_->Pop();
  right.ToRegister();
  left.ToRegister();
  __ cmp(right.reg(), Operand(left.reg()));
  right.Unuse();
  left.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  STATIC_ASSERT(kSmiTag == 0);  // EBP value is aligned, so it looks like a Smi.
  Result ebp_as_smi = allocator_->Allocate();
  ASSERT(ebp_as_smi.is_valid());
  __ mov(ebp_as_smi.reg(), Operand(ebp));
  frame_->Push(&ebp_as_smi);
}


void CodeGenerator::GenerateRandomHeapNumber(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  frame_->SpillAll();

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(0, ebx);
  __ CallCFunction(ExternalReference::random_uint32_function(masm()->isolate()),
                   0);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
  // This is implemented on both SSE2 and FPU.
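  //
  // Worked example (illustrative): let r be the 32 random bits in eax.
  // The double with bit pattern 0x41300000:r is (1 + r*2^-52) * 2^20,
  // which equals 2^20 + r*2^-32. Subtracting 2^20 therefore leaves
  // r*2^-32, a uniformly distributed value in [0, 1). The SSE2 path builds
  // that bit pattern with a pxor against 1.0*2^20 (whose low word is zero);
  // the FPU path stores the two 32-bit halves to memory instead.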
  if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
    CpuFeatures::Scope fscope(SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, Operand(ebx));
    __ movd(xmm0, Operand(eax));
    __ cvtss2sd(xmm1, xmm1);
    __ pxor(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  __ mov(eax, edi);

  Result result = allocator_->Allocate(eax);
  frame_->Push(&result);
}


void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  SubStringStub stub;
  Result answer = frame_->CallStub(&stub, 3);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringCompareStub stub;
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
  ASSERT_EQ(4, args->length());

  // Load the arguments on the stack and call the stub.
  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));
  Load(args->at(3));

  RegExpExecStub stub;
  Result result = frame_->CallStub(&stub, 4);
  frame_->Push(&result);
}


void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));  // Size of array, smi.
  Load(args->at(1));  // "index" property value.
  Load(args->at(2));  // "input" property value.

  RegExpConstructResultStub stub;
  Result result = frame_->CallStub(&stub, 3);
  frame_->Push(&result);
}


class DeferredSearchCache: public DeferredCode {
 public:
  DeferredSearchCache(Register dst, Register cache, Register key)
      : dst_(dst), cache_(cache), key_(key) {
    set_comment("[ DeferredSearchCache");
  }

  virtual void Generate();

 private:
  Register dst_;    // on invocation Smi index of finger, on exit
                    // holds value being looked up.
  Register cache_;  // instance of JSFunctionResultCache.
  Register key_;    // key being looked up.
};


void DeferredSearchCache::Generate() {
  Label first_loop, search_further, second_loop, cache_miss;

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
  Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);

  // Check the cache from finger to start of the cache.
  __ bind(&first_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
  __ j(less, &search_further);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &first_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&search_further);

  // Check the cache from end of cache up to finger.
  __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));

  __ bind(&second_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  // Consider prefetching into some reg.
  __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
  __ j(less_equal, &cache_miss);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &second_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&cache_miss);
  __ push(cache_);  // store a reference to cache
  __ push(key_);  // store a key
  __ push(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ push(key_);
  // On ia32 function must be in edi.
  __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
  ParameterCount expected(1);
  __ InvokeFunction(edi, expected, CALL_FUNCTION);

  // Find a place to put new cached value into.
  Label add_new_entry, update_cache;
  __ mov(ecx, Operand(esp, kPointerSize));  // restore the cache
  // Possible optimization: cache size is constant for the given cache
  // so technically we could use a constant here.  However, if we have
  // cache miss this optimization would hardly matter much.

  // Check if we could add new entry to cache.
  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ j(greater, &add_new_entry);

  // Check if we could evict entry after finger.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(kEntrySizeSmi));
  __ cmp(ebx, Operand(edx));
  __ j(greater, &update_cache);

  // Need to wrap over the cache.
  __ mov(edx, Immediate(kEntriesIndexSmi));
  __ jmp(&update_cache);

  __ bind(&add_new_entry);
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);

  // Update the cache itself.
  // edx holds the index.
  __ bind(&update_cache);
  __ pop(ebx);  // restore the key
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
  // Store key.
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  // Store value.
  __ pop(ecx);  // restore the cache.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(Smi::FromInt(1)));
  __ mov(ebx, eax);
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  if (!dst_.is(eax)) {
    __ mov(dst_, eax);
  }
}
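

// Illustrative sketch (plain C++, informal names; not part of the codegen)
// of the lookup the deferred code above performs. A JSFunctionResultCache
// is a FixedArray of header fields followed by key/value pairs; the finger
// marks the most recent hit. The search walks backwards from the finger,
// then wraps to the populated tail of the array:
//
//   int Lookup(Cache* cache, Object* key) {
//     for (int i = cache->finger() - kEntrySize;
//          i >= kEntriesIndex; i -= kEntrySize) {
//       if (cache->get(i) == key) { cache->set_finger(i); return i + 1; }
//     }
//     for (int i = cache->size() - kEntrySize;
//          i > cache->finger(); i -= kEntrySize) {
//       if (cache->get(i) == key) { cache->set_finger(i); return i + 1; }
//     }
//     return -1;  // miss: invoke the factory function and cache the result
//   }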


void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      masm()->isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    frame_->Push(FACTORY->undefined_value());
    return;
  }

  Load(args->at(1));
  Result key = frame_->Pop();
  key.ToRegister();

  Result cache = allocator()->Allocate();
  ASSERT(cache.is_valid());
  __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
  __ mov(cache.reg(),
         ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
                                                          cache.reg(),
                                                          key.reg());

  // tmp.reg() now holds finger offset as a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp.reg(), FieldOperand(cache.reg(),
                                 JSFunctionResultCache::kFingerOffset));
  __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
  deferred->Branch(not_equal);

  __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));

  deferred->BindExit();
  frame_->Push(&tmp);
}


void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  Load(args->at(0));
  NumberToStringStub stub;
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


class DeferredSwapElements: public DeferredCode {
 public:
  DeferredSwapElements(Register object, Register index1, Register index2)
      : object_(object), index1_(index1), index2_(index2) {
    set_comment("[ DeferredSwapElements");
  }

  virtual void Generate();

 private:
  Register object_, index1_, index2_;
};


void DeferredSwapElements::Generate() {
  __ push(object_);
  __ push(index1_);
  __ push(index2_);
  __ CallRuntime(Runtime::kSwapElements, 3);
}


void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
  // Note: this code assumes that the indices passed are within the
  // elements' bounds and refer to valid (not hole) values.
7754 Comment cmnt(masm_, "[ GenerateSwapElements");
7755
7756 ASSERT_EQ(3, args->length());
7757
7758 Load(args->at(0));
7759 Load(args->at(1));
7760 Load(args->at(2));
7761
7762 Result index2 = frame_->Pop();
7763 index2.ToRegister();
7764
7765 Result index1 = frame_->Pop();
7766 index1.ToRegister();
7767
7768 Result object = frame_->Pop();
7769 object.ToRegister();
7770
7771 Result tmp1 = allocator()->Allocate();
7772 tmp1.ToRegister();
7773 Result tmp2 = allocator()->Allocate();
7774 tmp2.ToRegister();
7775
7776 frame_->Spill(object.reg());
7777 frame_->Spill(index1.reg());
7778 frame_->Spill(index2.reg());
7779
7780 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
7781 index1.reg(),
7782 index2.reg());
7783
7784 // Fetch the map and check if array is in fast case.
7785 // Check that object doesn't require security checks and
7786 // has no indexed interceptor.
7787 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
Leon Clarkef7060e22010-06-03 12:02:55 +01007788 deferred->Branch(below);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007789 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
7790 KeyedLoadIC::kSlowCaseBitFieldMask);
Steve Block6ded16b2010-05-10 14:33:55 +01007791 deferred->Branch(not_zero);
7792
Iain Merrick75681382010-08-19 15:07:18 +01007793 // Check the object's elements are in fast case and writable.
Steve Block6ded16b2010-05-10 14:33:55 +01007794 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
7795 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
Steve Block44f0eee2011-05-26 01:26:41 +01007796 Immediate(FACTORY->fixed_array_map()));
Steve Block6ded16b2010-05-10 14:33:55 +01007797 deferred->Branch(not_equal);
7798
7799 // Smi-tagging is equivalent to multiplying by 2.
7800 STATIC_ASSERT(kSmiTag == 0);
7801 STATIC_ASSERT(kSmiTagSize == 1);
7802
7803 // Check that both indices are smis.
7804 __ mov(tmp2.reg(), index1.reg());
7805 __ or_(tmp2.reg(), Operand(index2.reg()));
7806 __ test(tmp2.reg(), Immediate(kSmiTagMask));
7807 deferred->Branch(not_zero);
7808
Ben Murdochdb5a90a2011-01-06 18:27:03 +00007809 // Check that both indices are valid.
7810 __ mov(tmp2.reg(), FieldOperand(object.reg(), JSArray::kLengthOffset));
7811 __ cmp(tmp2.reg(), Operand(index1.reg()));
7812 deferred->Branch(below_equal);
7813 __ cmp(tmp2.reg(), Operand(index2.reg()));
7814 deferred->Branch(below_equal);
7815
Steve Block6ded16b2010-05-10 14:33:55 +01007816 // Bring addresses into index1 and index2.
Kristian Monsen25f61362010-05-21 11:50:48 +01007817 __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
7818 __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01007819
7820 // Swap elements.
7821 __ mov(object.reg(), Operand(index1.reg(), 0));
7822 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
7823 __ mov(Operand(index2.reg(), 0), object.reg());
7824 __ mov(Operand(index1.reg(), 0), tmp2.reg());
7825
7826 Label done;
7827 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
7828 // Possible optimization: do a check that both values are Smis
7829 // (or them and test against Smi mask.)

  __ mov(tmp2.reg(), tmp1.reg());
  __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
  __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
  __ bind(&done);

  deferred->BindExit();
  frame_->Push(FACTORY->undefined_value());
}


void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateCallFunction");

  ASSERT(args->length() >= 2);

  int n_args = args->length() - 2;  // for receiver and function.
  Load(args->at(0));  // receiver
  for (int i = 0; i < n_args; i++) {
    Load(args->at(i + 1));
  }
  Load(args->at(n_args + 1));  // function
  Result result = frame_->CallJSFunction(n_args);
  frame_->Push(&result);
}


// Generates the Math.pow method. Only handles special cases and
// branches to the runtime system for everything else. Please note
// that this function assumes that the callsite has executed ToNumber
// on both arguments.
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));
  if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
    Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->Push(&res);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    Label allocate_return;
    // Load the two operands while leaving the values on the frame.
    frame()->Dup();
    Result exponent = frame()->Pop();
    exponent.ToRegister();
    frame()->Spill(exponent.reg());
    frame()->PushElementAt(1);
    Result base = frame()->Pop();
    base.ToRegister();
    frame()->Spill(base.reg());

    Result answer = allocator()->Allocate();
    ASSERT(answer.is_valid());
    ASSERT(!exponent.reg().is(base.reg()));
    JumpTarget call_runtime;

    // Save 1 in xmm3 - we need this several times later on.
    __ mov(answer.reg(), Immediate(1));
    __ cvtsi2sd(xmm3, Operand(answer.reg()));

    Label exponent_nonsmi;
    Label base_nonsmi;
    // If the exponent is a heap number go to that specific case.
    __ test(exponent.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &exponent_nonsmi);
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_nonsmi);

    // Optimized version when y is an integer.
    Label powi;
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&powi);
    // The exponent is a smi and the base is a heap number.
    __ bind(&base_nonsmi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           FACTORY->heap_number_map());
    call_runtime.Branch(not_equal);

    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // Optimized version of pow if y is an integer.
    __ bind(&powi);
    __ SmiUntag(exponent.reg());

    // Save exponent in base as we need to check if exponent is negative
    // later. We know that base and exponent are in different registers.
    __ mov(base.reg(), exponent.reg());

    // Get absolute value of exponent.
    Label no_neg;
    __ cmp(exponent.reg(), 0);
    __ j(greater_equal, &no_neg);
    __ neg(exponent.reg());
    __ bind(&no_neg);

    // Load xmm1 with 1.
    __ movsd(xmm1, xmm3);
    Label while_true;
    Label no_multiply;

    __ bind(&while_true);
    __ shr(exponent.reg(), 1);
    __ j(not_carry, &no_multiply);
    __ mulsd(xmm1, xmm0);
    __ bind(&no_multiply);
    __ test(exponent.reg(), Operand(exponent.reg()));
    __ mulsd(xmm0, xmm0);
    __ j(not_zero, &while_true);

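    // The loop above is square-and-multiply: each round shifts the low bit
    // of the exponent into the carry flag, multiplies it into the result
    // when set, and squares the base for the next bit. A rough C++ sketch
    // of the computation (illustrative only, not part of the build; smis
    // are 31-bit, so negating the exponent cannot overflow):
    //
    //   double PowSmiExponent(double base, int exponent) {
    //     unsigned e = exponent < 0 ? -exponent : exponent;
    //     double result = 1.0;
    //     while (e != 0) {
    //       if (e & 1) result *= base;  // low bit set: fold into result.
    //       base *= base;               // square for the next bit.
    //       e >>= 1;
    //     }
    //     return exponent < 0 ? 1.0 / result : result;  // 1/result below.
    //   }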
    // base.reg() holds the original exponent - if it was negative,
    // return 1/result.
    __ test(base.reg(), Operand(base.reg()));
    __ j(positive, &allocate_return);
    // Special case if xmm1 has reached infinity.
    __ mov(answer.reg(), Immediate(0x7FB00000));
    __ movd(xmm0, Operand(answer.reg()));
    __ cvtss2sd(xmm0, xmm0);
    __ ucomisd(xmm0, xmm1);
    call_runtime.Branch(equal);
    __ divsd(xmm3, xmm1);
    __ movsd(xmm1, xmm3);
    __ jmp(&allocate_return);

    // The exponent (or both operands) is a heap number - from here on we
    // work on doubles.
    __ bind(&exponent_nonsmi);
    __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
           FACTORY->heap_number_map());
    call_runtime.Branch(not_equal);
    __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
    // Test if the exponent is NaN.
    __ ucomisd(xmm1, xmm1);
    call_runtime.Branch(parity_even);

    Label base_not_smi;
    Label handle_special_cases;
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_not_smi);
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&handle_special_cases);
    __ bind(&base_not_smi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           FACTORY->heap_number_map());
    call_runtime.Branch(not_equal);
    __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
    __ and_(answer.reg(), HeapNumber::kExponentMask);
    __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
    // base is NaN or +/-Infinity.
    call_runtime.Branch(greater_equal);
    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // base is in xmm0 and exponent is in xmm1.
    __ bind(&handle_special_cases);
    Label not_minus_half;
    // Test for -0.5.
    // Load xmm2 with -0.5.
    __ mov(answer.reg(), Immediate(0xBF000000));
    __ movd(xmm2, Operand(answer.reg()));
    __ cvtss2sd(xmm2, xmm2);
    // xmm2 now has -0.5.
    __ ucomisd(xmm2, xmm1);
    __ j(not_equal, &not_minus_half);

    // Calculates reciprocal of square root.
    // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
    __ xorpd(xmm1, xmm1);
    __ addsd(xmm1, xmm0);
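    // Adding +0.0 turns a -0 base into +0 (IEEE 754: -0 + +0 == +0), so
    // the square root below sees +0 where the input was -0.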
    __ sqrtsd(xmm1, xmm1);
    __ divsd(xmm3, xmm1);
    __ movsd(xmm1, xmm3);
    __ jmp(&allocate_return);

    // Test for 0.5.
    __ bind(&not_minus_half);
    // Load xmm2 with 0.5.
    // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
    __ addsd(xmm2, xmm3);
    // xmm2 now has 0.5.
    __ ucomisd(xmm2, xmm1);
    call_runtime.Branch(not_equal);
    // Calculates square root.
    // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
    __ xorpd(xmm1, xmm1);
    __ addsd(xmm1, xmm0);
    __ sqrtsd(xmm1, xmm1);

    JumpTarget done;
    Label failure, success;
    __ bind(&allocate_return);
    // Make a copy of the frame to enable us to handle allocation
    // failure after the JumpTarget jump.
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(answer.reg(), exponent.reg(),
                          base.reg(), &failure);
    __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
    // Remove the two original values from the frame - we only need those
    // in the case where we branch to runtime.
    frame()->Drop(2);
    exponent.Unuse();
    base.Unuse();
    done.Jump(&answer);
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    // If we experience an allocation failure we branch to runtime.
    __ bind(&failure);
    call_runtime.Bind();
    answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);

    done.Bind(&answer);
    frame()->Push(&answer);
  }
}


void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


// Generates the Math.sqrt method. Please note - this function assumes that
// the callsite has executed ToNumber on the argument.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));

  if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
    Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
    frame()->Push(&result);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    // Leave original value on the frame if we need to call runtime.
    frame()->Dup();
    Result result = frame()->Pop();
    result.ToRegister();
    frame()->Spill(result.reg());
    Label runtime;
    Label non_smi;
    Label load_done;
    JumpTarget end;

    __ test(result.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &non_smi);
    __ SmiUntag(result.reg());
    __ cvtsi2sd(xmm0, Operand(result.reg()));
    __ jmp(&load_done);
    __ bind(&non_smi);
    __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
           FACTORY->heap_number_map());
    __ j(not_equal, &runtime);
    __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));

    __ bind(&load_done);
    __ sqrtsd(xmm0, xmm0);
    // A copy of the virtual frame to allow us to go to runtime after the
    // JumpTarget jump.
    Result scratch = allocator()->Allocate();
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);

    __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
    frame()->Drop(1);
    scratch.Unuse();
    end.Jump(&result);
    // We only branch to runtime if we have an allocation error.
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&runtime);
    result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);

    end.Bind(&result);
    frame()->Push(&result);
  }
}


void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());
  Load(args->at(0));
  Load(args->at(1));
  Result right_res = frame_->Pop();
  Result left_res = frame_->Pop();
  right_res.ToRegister();
  left_res.ToRegister();
  Result tmp_res = allocator()->Allocate();
  ASSERT(tmp_res.is_valid());
  Register right = right_res.reg();
  Register left = left_res.reg();
  Register tmp = tmp_res.reg();
  right_res.Unuse();
  left_res.Unuse();
  tmp_res.Unuse();
  __ cmp(left, Operand(right));
  destination()->true_target()->Branch(equal);
  // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(Operand(tmp), right);
  __ test(Operand(tmp), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
  destination()->false_target()->Branch(not_equal);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  destination()->false_target()->Branch(not_equal);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  destination()->Split(equal);
}


void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  if (FLAG_debug_code) {
    __ AbortIfNotString(value.reg());
  }

  __ test(FieldOperand(value.reg(), String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));

  value.Unuse();
  destination()->Split(zero);
}


void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result string = frame_->Pop();
  string.ToRegister();
  if (FLAG_debug_code) {
    __ AbortIfNotString(string.reg());
  }

  Result number = allocator()->Allocate();
  ASSERT(number.is_valid());
  __ mov(number.reg(), FieldOperand(string.reg(), String::kHashFieldOffset));
  __ IndexFromHash(number.reg(), number.reg());
  string.Unuse();
  frame_->Push(&number);
}


void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
  ASSERT(!in_safe_int32_mode());
  if (CheckForInlineRuntimeCall(node)) {
    return;
  }

  ZoneList<Expression*>* args = node->arguments();
  Comment cmnt(masm_, "[ CallRuntime");
  const Runtime::Function* function = node->function();

  if (function == NULL) {
    // Push the builtins object found in the current global object.
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), GlobalObjectOperand());
    __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
    frame_->Push(&temp);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  if (function == NULL) {
    // Call the JS runtime function.
    frame_->Push(node->name());
    Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
                                       arg_count,
                                       loop_nesting_);
    frame_->RestoreContextRegister();
    frame_->Push(&answer);
  } else {
    // Call the C runtime function.
    Result answer = frame_->CallRuntime(function, arg_count);
    frame_->Push(&answer);
  }
}


void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
    // Swap the true and false targets but keep the same actual label
    // as the fall through.
    destination()->Invert();
    LoadCondition(node->expression(), destination(), true);
    // Swap the labels back.
    destination()->Invert();

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    if (property != NULL) {
      Load(property->obj());
      Load(property->key());
      frame_->Push(Smi::FromInt(strict_mode_flag()));
      Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 3);
      frame_->Push(&answer);
      return;
    }

    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (variable != NULL) {
      // Delete of an unqualified identifier is disallowed in strict mode,
      // but "delete this" is allowed.
      ASSERT(strict_mode_flag() == kNonStrictMode || variable->is_this());
      Slot* slot = variable->AsSlot();
      if (variable->is_global()) {
        LoadGlobal();
        frame_->Push(variable->name());
        frame_->Push(Smi::FromInt(kNonStrictMode));
        Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
                                              CALL_FUNCTION, 3);
        frame_->Push(&answer);

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // Call the runtime to delete from the context holding the named
        // variable. Sync the virtual frame eagerly so we can push the
        // arguments directly into place.
        frame_->SyncRange(0, frame_->element_count() - 1);
        frame_->EmitPush(esi);
        frame_->EmitPush(Immediate(variable->name()));
        Result answer = frame_->CallRuntime(Runtime::kDeleteContextSlot, 2);
        frame_->Push(&answer);
      } else {
        // Default: Result of deleting non-global, not dynamically
        // introduced variables is false.
        frame_->Push(FACTORY->false_value());
      }
    } else {
      // Default: Result of deleting expressions is true.
      Load(node->expression());  // may have side-effects
      frame_->SetElementAt(0, FACTORY->true_value());
    }

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->Push(&answer);

  } else if (op == Token::VOID) {
    Expression* expression = node->expression();
    if (expression && expression->AsLiteral() && (
        expression->AsLiteral()->IsTrue() ||
        expression->AsLiteral()->IsFalse() ||
        expression->AsLiteral()->handle()->IsNumber() ||
        expression->AsLiteral()->handle()->IsString() ||
        expression->AsLiteral()->handle()->IsJSRegExp() ||
        expression->AsLiteral()->IsNull())) {
      // Omit evaluating the value of the primitive literal.
      // It will be discarded anyway, and can have no side effect.
      frame_->Push(FACTORY->undefined_value());
    } else {
      Load(node->expression());
      frame_->SetElementAt(0, FACTORY->undefined_value());
    }

  } else {
    if (in_safe_int32_mode()) {
      Visit(node->expression());
      Result value = frame_->Pop();
      ASSERT(value.is_untagged_int32());
      // Registers containing an int32 value are not multiply used.
      ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
      value.ToRegister();
      switch (op) {
        case Token::SUB: {
          __ neg(value.reg());
          frame_->Push(&value);
          if (node->no_negative_zero()) {
            // -MIN_INT is MIN_INT with the overflow flag set.
            unsafe_bailout_->Branch(overflow);
          } else {
            // MIN_INT and 0 both have bad negations: both have bits 0..30
            // all clear, so a single test against 0x7FFFFFFF detects them.
            __ test(value.reg(), Immediate(0x7FFFFFFF));
            unsafe_bailout_->Branch(zero);
          }
          break;
        }
        case Token::BIT_NOT: {
          __ not_(value.reg());
          frame_->Push(&value);
          break;
        }
        case Token::ADD: {
          // Unary plus has no effect on int32 values.
          frame_->Push(&value);
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
    } else {
      Load(node->expression());
      bool can_overwrite = node->expression()->ResultOverwriteAllowed();
      UnaryOverwriteMode overwrite =
          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
      bool no_negative_zero = node->expression()->no_negative_zero();
      switch (op) {
        case Token::NOT:
        case Token::DELETE:
        case Token::TYPEOF:
          UNREACHABLE();  // handled above
          break;

        case Token::SUB: {
          GenericUnaryOpStub stub(
              Token::SUB,
              overwrite,
              NO_UNARY_FLAGS,
              no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
          Result operand = frame_->Pop();
          Result answer = frame_->CallStub(&stub, &operand);
          answer.set_type_info(TypeInfo::Number());
          frame_->Push(&answer);
          break;
        }
        case Token::BIT_NOT: {
          // Smi check.
          JumpTarget smi_label;
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          if (operand_info.IsSmi()) {
            if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
            frame_->Spill(operand.reg());
            // Set smi tag bit. It will be reset by the not operation.
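            // For a smi v == x << 1 (tag bit 0 clear), not(v | 1) equals
            // (~x) << 1, which is again a valid smi. Worked example:
            // x == 5 gives v == 0b1010, v | 1 == 0b1011, and ~0b1011 is
            // ...10100, the smi encoding of -6 == ~5.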
            __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
            __ not_(operand.reg());
            Result answer = operand;
            answer.set_type_info(TypeInfo::Smi());
            frame_->Push(&answer);
          } else {
            __ test(operand.reg(), Immediate(kSmiTagMask));
            smi_label.Branch(zero, &operand, taken);

            GenericUnaryOpStub stub(Token::BIT_NOT,
                                    overwrite,
                                    NO_UNARY_SMI_CODE_IN_STUB);
            Result answer = frame_->CallStub(&stub, &operand);
            continue_label.Jump(&answer);

            smi_label.Bind(&answer);
            answer.ToRegister();
            frame_->Spill(answer.reg());
            // Set smi tag bit. It will be reset by the not operation.
            __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
            __ not_(answer.reg());

            continue_label.Bind(&answer);
            answer.set_type_info(TypeInfo::Integer32());
            frame_->Push(&answer);
          }
          break;
        }
        case Token::ADD: {
          // Smi check.
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          __ test(operand.reg(), Immediate(kSmiTagMask));
          continue_label.Branch(zero, &operand, taken);

          frame_->Push(&operand);
          Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
                                                CALL_FUNCTION, 1);

          continue_label.Bind(&answer);
          if (operand_info.IsSmi()) {
            answer.set_type_info(TypeInfo::Smi());
          } else if (operand_info.IsInteger32()) {
            answer.set_type_info(TypeInfo::Integer32());
          } else {
            answer.set_type_info(TypeInfo::Number());
          }
          frame_->Push(&answer);
          break;
        }
        default:
          UNREACHABLE();
      }
    }
  }
}


// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation, call
// into the runtime to convert the argument to a number, and call the
// specialized add or subtract stub. The result is left in dst.
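// (This deferred path is taken, for example, by ++x when x holds the
// largest smi, so the optimistic add overflows, or when x is not a smi
// at all, e.g. a heap number.)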
class DeferredPrefixCountOperation: public DeferredCode {
 public:
  DeferredPrefixCountOperation(Register dst,
                               bool is_increment,
                               TypeInfo input_type)
      : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPrefixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation and call
// into the runtime to convert the argument to a number. Update the
// original value in old. Call the specialized add or subtract stub.
// The result is left in dst.
class DeferredPostfixCountOperation: public DeferredCode {
 public:
  DeferredPostfixCountOperation(Register dst,
                                Register old,
                                bool is_increment,
                                TypeInfo input_type)
      : dst_(dst),
        old_(old),
        is_increment_(is_increment),
        input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  Register old_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPostfixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    __ push(dst_);  // Save the input to use as the old value.
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    __ push(eax);  // Save the result of ToNumber to use as the old value.
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ pop(old_);
}


void CodeGenerator::VisitCountOperation(CountOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CountOperation");

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);

  // Postfix operations need a stack slot under the reference to hold
  // the old value while the new value is being stored. This is so that
  // in the case that storing the new value requires a call, the old
  // value will be in the frame to be spilled.
  if (is_postfix) frame_->Push(Smi::FromInt(0));

  // A constant reference is not saved to, so a constant reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!is_postfix) frame_->Push(Smi::FromInt(0));
      return;
    }
    target.TakeValue();

    Result new_value = frame_->Pop();
    new_value.ToRegister();

    Result old_value;  // Only allocated in the postfix case.
    if (is_postfix) {
      // Allocate a temporary to preserve the old value.
      old_value = allocator_->Allocate();
      ASSERT(old_value.is_valid());
      __ mov(old_value.reg(), new_value.reg());

      // The return value for postfix operations is ToNumber(input).
      // Keep more precise type info if the input is some kind of
      // number already. If the input is not a number we have to wait
      // for the deferred code to convert it.
      if (new_value.type_info().IsNumber()) {
        old_value.set_type_info(new_value.type_info());
      }
    }

    // Ensure the new value is writable.
    frame_->Spill(new_value.reg());

    Result tmp;
    if (new_value.is_smi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
    } else {
      // We don't know statically if the input is a smi.
      // In order to combine the overflow and the smi tag check, we need
      // to be able to allocate a byte register. We attempt to do so
      // without spilling. If we fail, we will generate separate overflow
      // and smi tag checks.
      // We allocate and clear a temporary byte register before performing
      // the count operation since clearing the register using xor will clear
      // the overflow flag.
      tmp = allocator_->AllocateByteRegisterWithoutSpilling();
      if (tmp.is_valid()) {
        __ Set(tmp.reg(), Immediate(0));
      }
    }

    if (is_increment) {
      __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    } else {
      __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    }

    DeferredCode* deferred = NULL;
    if (is_postfix) {
      deferred = new DeferredPostfixCountOperation(new_value.reg(),
                                                   old_value.reg(),
                                                   is_increment,
                                                   new_value.type_info());
    } else {
      deferred = new DeferredPrefixCountOperation(new_value.reg(),
                                                  is_increment,
                                                  new_value.type_info());
    }

    if (new_value.is_smi()) {
      // In case we have a smi as input just check for overflow.
      deferred->Branch(overflow);
    } else {
      // If the count operation didn't overflow and the result is a valid
      // smi, we're done. Otherwise, we jump to the deferred slow-case
      // code.
      // We combine the overflow and the smi tag check if we could
      // successfully allocate a temporary byte register.
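      // (tmp was cleared to zero above, so setcc(overflow, tmp) leaves 1
      // in it exactly when the add/sub overflowed; or-ing in new_value
      // folds in the smi tag bit, and the single test against kSmiTagMask
      // then fails iff we overflowed or the result is not a smi.)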
      if (tmp.is_valid()) {
        __ setcc(overflow, tmp.reg());
        __ or_(Operand(tmp.reg()), new_value.reg());
        __ test(tmp.reg(), Immediate(kSmiTagMask));
        tmp.Unuse();
        deferred->Branch(not_zero);
      } else {
        // Otherwise we test separately for overflow and smi tag.
        deferred->Branch(overflow);
        __ test(new_value.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      }
    }
    deferred->BindExit();

    // Postfix count operations return their input converted to
    // number. The case when the input is already a number is covered
    // above in the allocation code for old_value.
    if (is_postfix && !new_value.type_info().IsNumber()) {
      old_value.set_type_info(TypeInfo::Number());
    }

    // The result of ++ or -- is an Integer32 if the
    // input is a smi. Otherwise it is a number.
    if (new_value.is_smi()) {
      new_value.set_type_info(TypeInfo::Integer32());
    } else {
      new_value.set_type_info(TypeInfo::Number());
    }

    // Postfix: store the old value in the allocated slot under the
    // reference.
    if (is_postfix) frame_->SetElementAt(target.size(), &old_value);

    frame_->Push(&new_value);
    // Non-constant: update the reference.
    if (!is_const) target.SetValue(NOT_CONST_INIT);
  }

  // Postfix: drop the new value and use the old.
  if (is_postfix) frame_->Drop();
}


void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
  Token::Value op = node->op();
  Comment cmnt(masm_, "[ Int32BinaryOperation");
  ASSERT(in_safe_int32_mode());
  ASSERT(safe_int32_mode_enabled());
  ASSERT(FLAG_safe_int32_compiler);

  if (op == Token::COMMA) {
    // Discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  ASSERT(right.is_untagged_int32());
  ASSERT(left.is_untagged_int32());
  // Registers containing an int32 value are not multiply used.
  ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
  ASSERT(!right.is_register() || !frame_->is_used(right.reg()));

  switch (op) {
    case Token::COMMA:
    case Token::OR:
    case Token::AND:
      UNREACHABLE();
      break;
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
      if (left.is_constant() || right.is_constant()) {
        int32_t value;  // Put constant in value, non-constant in left.
        // Constants are known to be int32 values, from static analysis,
        // or else will be converted to int32 by implicit ECMA [[ToInt32]].
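        // (e.g. in JS, (4294967296 | 0) === 0 and
        // (2147483648 | 0) === -2147483648.)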
        if (left.is_constant()) {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        } else {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        }

        left.ToRegister();
        if (op == Token::BIT_OR) {
          __ or_(Operand(left.reg()), Immediate(value));
        } else if (op == Token::BIT_XOR) {
          __ xor_(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(Operand(left.reg()), Immediate(value));
        }
      } else {
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::BIT_OR) {
          __ or_(left.reg(), Operand(right.reg()));
        } else if (op == Token::BIT_XOR) {
          __ xor_(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(left.reg(), Operand(right.reg()));
        }
      }
      frame_->Push(&left);
      right.Unuse();
      break;
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      bool test_shr_overflow = false;
      left.ToRegister();
      if (right.is_constant()) {
        ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
        int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
        if (op == Token::SAR) {
          __ sar(left.reg(), shift_amount);
        } else if (op == Token::SHL) {
          __ shl(left.reg(), shift_amount);
        } else {
          ASSERT(op == Token::SHR);
          __ shr(left.reg(), shift_amount);
          if (shift_amount == 0) test_shr_overflow = true;
        }
      } else {
        // Move right to ecx.
        if (left.is_register() && left.reg().is(ecx)) {
          right.ToRegister();
          __ xchg(left.reg(), right.reg());
          left = right;  // Left is unused here, copy of right unused by Push.
        } else {
          right.ToRegister(ecx);
          left.ToRegister();
        }
        if (op == Token::SAR) {
          __ sar_cl(left.reg());
        } else if (op == Token::SHL) {
          __ shl_cl(left.reg());
        } else {
          ASSERT(op == Token::SHR);
          __ shr_cl(left.reg());
          test_shr_overflow = true;
        }
      }
      {
        Register left_reg = left.reg();
        frame_->Push(&left);
        right.Unuse();
        if (test_shr_overflow && !node->to_int32()) {
          // Uint32 results with top bit set are not Int32 values.
          // If they will be forced to Int32, skip the test.
          // Test is needed because shr with shift amount 0 does not set flags.
8814 unsafe_bailout_->Branch(sign);
8815 }
8816 }
8817 break;
8818 }
8819 case Token::ADD:
8820 case Token::SUB:
8821 case Token::MUL:
8822 if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
8823 int32_t value; // Put constant in value, non-constant in left.
8824 if (right.is_constant()) {
8825 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
8826 value = NumberToInt32(*right.handle());
8827 } else {
8828 ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
8829 value = NumberToInt32(*left.handle());
8830 left = right;
8831 }
8832
8833 left.ToRegister();
8834 if (op == Token::ADD) {
8835 __ add(Operand(left.reg()), Immediate(value));
8836 } else if (op == Token::SUB) {
8837 __ sub(Operand(left.reg()), Immediate(value));
8838 } else {
8839 ASSERT(op == Token::MUL);
8840 __ imul(left.reg(), left.reg(), value);
8841 }
8842 } else {
8843 left.ToRegister();
8844 ASSERT(left.is_register());
8845 ASSERT(right.is_register());
8846 if (op == Token::ADD) {
8847 __ add(left.reg(), Operand(right.reg()));
8848 } else if (op == Token::SUB) {
8849 __ sub(left.reg(), Operand(right.reg()));
8850 } else {
8851 ASSERT(op == Token::MUL);
8852 // We have statically verified that a negative zero can be ignored.
8853 __ imul(left.reg(), Operand(right.reg()));
8854 }
8855 }
8856 right.Unuse();
8857 frame_->Push(&left);
Russell Brenner90bac252010-11-18 13:33:46 -08008858 if (!node->to_int32() || op == Token::MUL) {
        // If ToInt32 is called on the result of ADD or SUB we don't care
        // about overflows, but the result of MUL can fail to be precisely
        // representable as a double, so MUL always checks for overflow.
        unsafe_bailout_->Branch(overflow);
      }
      break;
    case Token::DIV:
    case Token::MOD: {
      if (right.is_register() && (right.reg().is(eax) || right.reg().is(edx))) {
        if (left.is_register() && left.reg().is(edi)) {
          right.ToRegister(ebx);
        } else {
          right.ToRegister(edi);
        }
      }
      left.ToRegister(eax);
      Result edx_reg = allocator_->Allocate(edx);
      right.ToRegister();
      // The results are unused here because BreakTarget::Branch cannot handle
      // live results.
      Register right_reg = right.reg();
      left.Unuse();
      right.Unuse();
      edx_reg.Unuse();
      __ cmp(right_reg, 0);
      // Ensure divisor is positive: no chance of non-int32 or -0 result.
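      // (A divisor <= 0 admits division by zero, kMinInt / -1, which
      // overflows idiv, and 0 / negative, which is -0 in JS; bailing out
      // avoids all of these.)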
      unsafe_bailout_->Branch(less_equal);
      __ cdq();  // Sign-extend eax into edx:eax.
      __ idiv(right_reg);
      if (op == Token::MOD) {
        // Negative zero can arise from a negative dividend with a zero
        // remainder.
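        // (e.g. in JS, (-5 % 5) is -0: the remainder in edx is zero while
        // the dividend in eax is negative.)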
        if (!node->no_negative_zero()) {
          Label not_negative_zero;
          __ test(edx, Operand(edx));
          __ j(not_zero, &not_negative_zero);
          __ test(eax, Operand(eax));
          unsafe_bailout_->Branch(negative);
          __ bind(&not_negative_zero);
        }
        Result edx_result(edx, TypeInfo::Integer32());
        edx_result.set_untagged_int32(true);
        frame_->Push(&edx_result);
      } else {
        ASSERT(op == Token::DIV);
        __ test(edx, Operand(edx));
        unsafe_bailout_->Branch(not_equal);
        Result eax_result(eax, TypeInfo::Integer32());
        eax_result.set_untagged_int32(true);
        frame_->Push(&eax_result);
      }
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions. This means that the value
  // produced by a && or || operator is not necessarily a boolean.
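  // (e.g. in JS, ('' || 'fallback') yields the string 'fallback' and
  // (0 && f()) yields 0; neither result is a boolean.)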

  // NOTE: If the left hand side produces a materialized value (not
  // control flow), we force the right hand side to do the same. This
  // is necessary because we assume that if we get control flow on the
  // last path out of an expression we got it on all paths.
  if (node->op() == Token::AND) {
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_true;
    ControlDestination dest(&is_true, destination()->false_target(), true);
    LoadCondition(node->left(), &dest, false);

    if (dest.false_was_fall_through()) {
      // The current false target was used as the fall-through. If
      // there are no dangling jumps to is_true then the left
      // subexpression was unconditionally false. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_true.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current false target was a forward jump then we have a
        // valid frame, we have just bound the false target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->false_target()->Unuse();
          destination()->false_target()->Jump();
        }
        is_true.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have actually just jumped to or bound the current false
        // target but the current control destination is not marked as
        // used.
        destination()->Use(false);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_true
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_true
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&pop_and_continue, &exit, true);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else {
    ASSERT(node->op() == Token::OR);
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_false;
    ControlDestination dest(destination()->true_target(), &is_false, false);
    LoadCondition(node->left(), &dest, false);

    if (dest.true_was_fall_through()) {
      // The current true target was used as the fall-through. If
      // there are no dangling jumps to is_false then the left
      // subexpression was unconditionally true. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_false.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current true target was a forward jump then we have a
        // valid frame, we have just bound the true target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->true_target()->Unuse();
          destination()->true_target()->Jump();
        }
        is_false.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have just jumped to or bound the current true target but
        // the current control destination is not marked as used.
        destination()->Use(true);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_false
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_false
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&exit, &pop_and_continue, false);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }
  }
}


void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else if (in_safe_int32_mode()) {
    Visit(node->left());
    Visit(node->right());
    Int32BinaryOperation(node);
  } else {
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
    OverwriteMode overwrite_mode = NO_OVERWRITE;
    if (node->left()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_LEFT;
    } else if (node->right()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_RIGHT;
    }

    if (node->left()->IsTrivial()) {
      Load(node->right());
      Result right = frame_->Pop();
      frame_->Push(node->left());
      frame_->Push(&right);
    } else {
      Load(node->left());
      Load(node->right());
    }
    GenericBinaryOperation(node, overwrite_mode);
  }
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
  ASSERT(!in_safe_int32_mode());
  frame_->PushFunction();
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareOperation");

  bool left_already_loaded = false;

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();
  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Result answer = frame_->Pop();
    answer.ToRegister();

    if (check->Equals(HEAP->number_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ cmp(answer.reg(), FACTORY->heap_number_map());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(HEAP->string_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable string object.
      Result temp = allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
      temp.Unuse();
      answer.Unuse();
      destination()->Split(below);

    } else if (check->Equals(HEAP->boolean_symbol())) {
      __ cmp(answer.reg(), FACTORY->true_value());
      destination()->true_target()->Branch(equal);
      __ cmp(answer.reg(), FACTORY->false_value());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(HEAP->undefined_symbol())) {
      __ cmp(answer.reg(), FACTORY->undefined_value());
      destination()->true_target()->Branch(equal);

      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable object.
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      answer.Unuse();
      destination()->Split(not_zero);

    } else if (check->Equals(HEAP->function_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
      destination()->true_target()->Branch(equal);
      // Regular expressions are callable so typeof == 'function'.
      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
      answer.Unuse();
      destination()->Split(equal);
    } else if (check->Equals(HEAP->object_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      __ cmp(answer.reg(), FACTORY->null_value());
      destination()->true_target()->Branch(equal);

      Result map = allocator()->Allocate();
      ASSERT(map.is_valid());
      // Regular expressions are typeof == 'function', not 'object'.
      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
      destination()->false_target()->Branch(equal);

      // It can be an undetectable object.
      __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      // Do a range test for JSObject type. We can't use
      // MacroAssembler::IsInstanceJSObjectType, because we are using a
      // ControlDestination, so we copy its implementation here.
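      // (After the subtraction below, instance types in
      // [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE] land in
      // [0, LAST - FIRST], so a single unsigned compare, split on
      // below_equal, checks both bounds at once.)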
      __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
      __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
      __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
      answer.Unuse();
      map.Unuse();
      destination()->Split(below_equal);
    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      answer.Unuse();
      destination()->Goto(false);
    }
    return;
  } else if (op == Token::LT &&
             right->AsLiteral() != NULL &&
             right->AsLiteral()->handle()->IsHeapNumber()) {
    Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
    if (check->value() == 2147483648.0) {  // 0x80000000.
      Load(left);
      left_already_loaded = true;
      Result lhs = frame_->Pop();
      lhs.ToRegister();
      __ test(lhs.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);  // All Smis are less.
      Result scratch = allocator()->Allocate();
      ASSERT(scratch.is_valid());
      __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
      __ cmp(scratch.reg(), FACTORY->heap_number_map());
      JumpTarget not_a_number;
      not_a_number.Branch(not_equal, &lhs);
      __ mov(scratch.reg(),
             FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
      __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
      not_a_number.Branch(above_equal, &lhs);  // It's a negative NaN or -Inf.
      const uint32_t borderline_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
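      // (A positive heap number is below 2^31 exactly when its biased
      // exponent is below kExponentBias + 31; the compare below is signed,
      // so negative numbers, whose sign bit is set, always pass as well.)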
      __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
      scratch.Unuse();
      lhs.Unuse();
      destination()->true_target()->Branch(less);
      destination()->false_target()->Jump();

      not_a_number.Bind(&lhs);
      frame_->Push(&lhs);
    }
  }

  Condition cc = no_condition;
  bool strict = false;
  switch (op) {
    case Token::EQ_STRICT:
      strict = true;
      // Fall through
    case Token::EQ:
      cc = equal;
      break;
    case Token::LT:
      cc = less;
      break;
    case Token::GT:
      cc = greater;
      break;
    case Token::LTE:
      cc = less_equal;
      break;
    case Token::GTE:
      cc = greater_equal;
      break;
    case Token::IN: {
      if (!left_already_loaded) Load(left);
      Load(right);
      Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
      frame_->Push(&answer);  // push the result
      return;
    }
    case Token::INSTANCEOF: {
      if (!left_already_loaded) Load(left);
      Load(right);
      InstanceofStub stub(InstanceofStub::kNoFlags);
      Result answer = frame_->CallStub(&stub, 2);
      answer.ToRegister();
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      destination()->Split(zero);
      return;
    }
    default:
      UNREACHABLE();
  }

  if (left->IsTrivial()) {
    if (!left_already_loaded) {
      Load(right);
      Result right_result = frame_->Pop();
      frame_->Push(left);
      frame_->Push(&right_result);
    } else {
      Load(right);
    }
  } else {
    if (!left_already_loaded) Load(left);
    Load(right);
  }
  Comparison(node, cc, strict, destination());
}


void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Result operand = frame_->Pop();
  operand.ToRegister();
  __ cmp(operand.reg(), FACTORY->null_value());
  if (node->is_strict()) {
    operand.Unuse();
    destination()->Split(equal);
  } else {
    // The 'null' value is only equal to 'undefined' if using non-strict
    // comparisons.
9321 destination()->true_target()->Branch(equal);
Steve Block44f0eee2011-05-26 01:26:41 +01009322 __ cmp(operand.reg(), FACTORY->undefined_value());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01009323 destination()->true_target()->Branch(equal);
9324 __ test(operand.reg(), Immediate(kSmiTagMask));
9325 destination()->false_target()->Branch(equal);
9326
9327 // It can be an undetectable object.
9328 // Use a scratch register in preference to spilling operand.reg().
9329 Result temp = allocator()->Allocate();
9330 ASSERT(temp.is_valid());
9331 __ mov(temp.reg(),
9332 FieldOperand(operand.reg(), HeapObject::kMapOffset));
9333 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
9334 1 << Map::kIsUndetectable);
9335 temp.Unuse();
9336 operand.Unuse();
9337 destination()->Split(not_zero);
9338 }
9339}
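
// Undetectable objects (the classic example is document.all in browser
// embedders) intentionally compare equal to null and undefined under
// non-strict ==, which is why the non-strict path above falls through to
// testing the map's kIsUndetectable bit instead of answering false as soon
// as the operand is neither null nor undefined.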


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
      && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
      && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
      && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
      && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
}
#endif


// Emit a LoadIC call to get the value from receiver and leave it in dst.
class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  DeferredReferenceGetNamedValue(Register dst,
                                 Register receiver,
                                 Handle<String> name,
                                 bool is_contextual)
      : dst_(dst),
        receiver_(receiver),
        name_(name),
        is_contextual_(is_contextual),
        is_dont_delete_(false) {
    set_comment(is_contextual
                ? "[ DeferredReferenceGetNamedValue (contextual)"
                : "[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

  void set_is_dont_delete(bool value) {
    ASSERT(is_contextual_);
    is_dont_delete_ = value;
  }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Handle<String> name_;
  bool is_contextual_;
  bool is_dont_delete_;
};


void DeferredReferenceGetNamedValue::Generate() {
  if (!receiver_.is(eax)) {
    __ mov(eax, receiver_);
  }
  __ Set(ecx, Immediate(name_));
  Handle<Code> ic(masm()->isolate()->builtins()->builtin(
      Builtins::kLoadIC_Initialize));
  RelocInfo::Mode mode = is_contextual_
      ? RelocInfo::CODE_TARGET_CONTEXT
      : RelocInfo::CODE_TARGET;
  __ call(ic, mode);
  // The call must be followed by:
  // - a test eax instruction to indicate that the inobject property
  //   case was inlined.
  // - a mov ecx or mov edx instruction to indicate that the
  //   contextual property load was inlined.
  //
  // Store the delta to the map check instruction here in the test
  // instruction.  Use masm_-> instead of the __ macro since the
  // latter can't return a value.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  Counters* counters = masm()->isolate()->counters();
  if (is_contextual_) {
    masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
    __ IncrementCounter(counters->named_load_global_inline_miss(), 1);
    if (is_dont_delete_) {
      __ IncrementCounter(counters->dont_delete_hint_miss(), 1);
    }
  } else {
    masm_->test(eax, Immediate(-delta_to_patch_site));
    __ IncrementCounter(counters->named_load_inline_miss(), 1);
  }

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetKeyedValue(Register dst,
                                          Register receiver,
                                          Register key)
      : dst_(dst), receiver_(receiver), key_(key) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  if (!receiver_.is(eax)) {
    // Register eax is available for key.
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (!key_.is(edx)) {
    // Register edx is available for receiver.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
  } else {
    __ xchg(edx, eax);
  }
  // Calculate the delta from the IC call instruction to the map check
  // cmp instruction in the inlined version.  This delta is stored in
  // a test(eax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the cmp instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(masm()->isolate()->builtins()->builtin(
      Builtins::kKeyedLoadIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction.  We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value.  This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_load_inline_miss(), 1);

  if (!dst_.is(eax)) __ mov(dst_, eax);
}
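
// The three-way shuffle above moves key into eax and receiver into edx
// (the KeyedLoadIC calling convention) without a scratch register.  The
// xchg fallback covers the only remaining case, receiver in eax and key in
// edx, where two plain moves would clobber one of the values.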


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
                                 Register scratch,
                                 StrictModeFlag strict_mode)
      : value_(value),
        key_(key),
        receiver_(receiver),
        scratch_(scratch),
        strict_mode_(strict_mode) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Register scratch_;
  Label patch_site_;
  StrictModeFlag strict_mode_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  Counters* counters = masm()->isolate()->counters();
  __ IncrementCounter(counters->keyed_store_inline_miss(), 1);
  // Move value_ to eax, key_ to ecx, and receiver_ to edx.
  Register old_value = value_;

  // First, move value to eax.
  if (!value_.is(eax)) {
    if (key_.is(eax)) {
      // Move key_ out of eax, preferably to ecx.
      if (!value_.is(ecx) && !receiver_.is(ecx)) {
        __ mov(ecx, key_);
        key_ = ecx;
      } else {
        __ mov(scratch_, key_);
        key_ = scratch_;
      }
    }
    if (receiver_.is(eax)) {
      // Move receiver_ out of eax, preferably to edx.
      if (!value_.is(edx) && !key_.is(edx)) {
        __ mov(edx, receiver_);
        receiver_ = edx;
      } else {
        // Both moves to scratch are from eax; also, no valid path hits both.
        __ mov(scratch_, receiver_);
        receiver_ = scratch_;
      }
    }
    __ mov(eax, value_);
    value_ = eax;
  }

  // Now value_ is in eax.  Move the other two to the right positions.
  // We do not update the variables key_ and receiver_ to ecx and edx.
  if (key_.is(ecx)) {
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (key_.is(edx)) {
    if (receiver_.is(ecx)) {
      __ xchg(edx, ecx);
    } else {
      __ mov(ecx, key_);
      if (!receiver_.is(edx)) {
        __ mov(edx, receiver_);
      }
    }
  } else {  // Key is not in edx or ecx.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    __ mov(ecx, key_);
  }

  // Call the IC stub.
  Handle<Code> ic(masm()->isolate()->builtins()->builtin(
      (strict_mode_ == kStrictMode) ? Builtins::kKeyedStoreIC_Initialize_Strict
                                    : Builtins::kKeyedStoreIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction.  We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value.  This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  // Restore value (returned from store IC) register.
  if (!old_value.is(eax)) __ mov(old_value, eax);
}
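
// Only one scratch register is available to the shuffle above; the comment
// in the eax-eviction code explains why that is enough (no valid register
// assignment needs scratch_ twice).  The store IC returns the stored value
// in eax, so old_value is restored afterwards for callers that track the
// value in a different register.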


Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif

  Isolate* isolate = masm()->isolate();
  Factory* factory = isolate->factory();
  Counters* counters = isolate->counters();

  bool contextual_load_in_builtin =
      is_contextual &&
      (isolate->bootstrapper()->IsActive() ||
       (!info_->closure().is_null() && info_->closure()->IsBuiltin()));

  Result result;
  // Do not inline in global code or when not in a loop.
  if (scope()->is_global_scope() ||
      loop_nesting() == 0 ||
      contextual_load_in_builtin) {
    Comment cmnt(masm(), "[ Load from named Property");
    frame()->Push(name);

    RelocInfo::Mode mode = is_contextual
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    result = frame()->CallLoadIC(mode);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined.  Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the property load.
    Comment cmnt(masm(), is_contextual
        ? "[ Inlined contextual property load"
        : "[ Inlined named property load");
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    result = allocator()->Allocate();
    ASSERT(result.is_valid());
    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(result.reg(),
                                           receiver.reg(),
                                           name,
                                           is_contextual);

    if (!is_contextual) {
      // Check that the receiver is a heap object.
      __ test(receiver.reg(), Immediate(kSmiTagMask));
      deferred->Branch(zero);
    }

    __ bind(deferred->patch_site());
    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(factory->null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    deferred->Branch(not_equal);

    // The delta from the patch label to the actual load must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
           LoadIC::kOffsetToLoadInstruction);

    if (is_contextual) {
      // Load the (initially invalid) cell and get its value.
      masm()->mov(result.reg(), factory->null_value());
      if (FLAG_debug_code) {
        __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
               factory->global_property_cell_map());
        __ Assert(equal, "Uninitialized inlined contextual load");
      }
      __ mov(result.reg(),
             FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
      __ cmp(result.reg(), factory->the_hole_value());
      deferred->Branch(equal);
      bool is_dont_delete = false;
      if (!info_->closure().is_null()) {
        // When doing lazy compilation we can check if the global cell
        // already exists and use its "don't delete" status as a hint.
        AssertNoAllocation no_gc;
        v8::internal::GlobalObject* global_object =
            info_->closure()->context()->global();
        LookupResult lookup;
        global_object->LocalLookupRealNamedProperty(*name, &lookup);
        if (lookup.IsProperty() && lookup.type() == NORMAL) {
          ASSERT(lookup.holder() == global_object);
          ASSERT(global_object->property_dictionary()->ValueAt(
              lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
          is_dont_delete = lookup.IsDontDelete();
        }
      }
      deferred->set_is_dont_delete(is_dont_delete);
      if (!is_dont_delete) {
        __ cmp(result.reg(), factory->the_hole_value());
        deferred->Branch(equal);
      } else if (FLAG_debug_code) {
        __ cmp(result.reg(), factory->the_hole_value());
        __ Check(not_equal, "DontDelete cells can't contain the hole");
      }
      __ IncrementCounter(counters->named_load_global_inline(), 1);
      if (is_dont_delete) {
        __ IncrementCounter(counters->dont_delete_hint_hit(), 1);
      }
    } else {
      // The initial (invalid) offset has to be large enough to force a 32-bit
      // instruction encoding to allow patching with an arbitrary offset.  Use
      // kMaxInt (minus kHeapObjectTag).
      int offset = kMaxInt;
      masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
      __ IncrementCounter(counters->named_load_inline(), 1);
    }

    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}
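
// For reference, the inlined (non-contextual) fast path emitted above looks
// roughly like this once the IC has patched it (map and offset are
// illustrative):
//
//   cmp [receiver + HeapObject::kMapOffset], <receiver's map>
//   jne <deferred LoadIC call>
//   mov result, [receiver + <inobject property offset>]
//
// Until the first IC miss patches it, the null_value map check always fails
// and the kMaxInt offset is never used.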


Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    result = frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
    // A test eax instruction following the call signals that the inobject
    // property case was inlined.  Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;
    Label patch_site;

    // Get the value and receiver from the stack.
    Result value = frame()->Pop();
    value.ToRegister();
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    // Allocate result register.
    result = allocator()->Allocate();
    ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    slow.Branch(zero, &value, &receiver);

    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    __ bind(&patch_site);
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(FACTORY->null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    slow.Branch(not_equal, &value, &receiver);

    // The delta from the patch label to the store offset must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
           StoreIC::kOffsetToStoreInstruction);

    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset.  Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    __ mov(FieldOperand(receiver.reg(), offset), value.reg());
    __ mov(result.reg(), Operand(value.reg()));

    // Allocate scratch register for write barrier.
    Result scratch = allocator()->Allocate();
    ASSERT(scratch.is_valid());

    // The write barrier clobbers all input registers, so spill the
    // receiver and the value.
    frame_->Spill(receiver.reg());
    frame_->Spill(value.reg());

    // If the receiver and the value share a register allocate a new
    // register for the receiver.
    if (receiver.reg().is(value.reg())) {
      receiver = allocator()->Allocate();
      ASSERT(receiver.is_valid());
      __ mov(receiver.reg(), Operand(value.reg()));
    }

    // Update the write barrier.  To save instructions in the inlined
    // version we do not filter smis.
    Label skip_write_barrier;
    __ InNewSpace(receiver.reg(), value.reg(), equal, &skip_write_barrier);
    int delta_to_record_write = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ lea(scratch.reg(), Operand(receiver.reg(), offset));
    __ RecordWriteHelper(receiver.reg(), scratch.reg(), value.reg());
    if (FLAG_debug_code) {
      __ mov(receiver.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(value.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(scratch.reg(), Immediate(BitCast<int32_t>(kZapValue)));
    }
    __ bind(&skip_write_barrier);
    value.Unuse();
    scratch.Unuse();
    receiver.Unuse();
    done.Jump(&result);

    slow.Bind(&value, &receiver);
    frame()->Push(&receiver);
    frame()->Push(&value);
    result = frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
    // Encode the offset to the map check instruction and the offset
    // to the write barrier store address computation in a test eax
    // instruction.
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ test(eax,
            Immediate((delta_to_record_write << 16) | delta_to_patch_site));
    done.Bind(&result);
  }

  ASSERT_EQ(expected_height, frame()->height());
  return result;
}
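
// Unlike the load case, the store patch site has to encode two offsets, so
// the trailing test eax immediate packs the delta to the write-barrier
// address computation into its upper 16 bits and the delta to the map check
// into its lower 16 bits, as described in the comment above the test.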


Result CodeGenerator::EmitKeyedLoad() {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Inline array load code if inside of a loop.  We do not know the
  // receiver map yet, so we initially generate the code with a check
  // against an invalid map.  In the inline cache code, we patch the map
  // check if appropriate.
  if (loop_nesting() > 0) {
    Comment cmnt(masm_, "[ Inlined load from keyed Property");

    // Use a fresh temporary to load the elements without destroying
    // the receiver which is needed for the deferred slow case.
    Result elements = allocator()->Allocate();
    ASSERT(elements.is_valid());

    Result key = frame_->Pop();
    Result receiver = frame_->Pop();
    key.ToRegister();
    receiver.ToRegister();

    // If key and receiver are shared registers on the frame, their values
    // will be automatically saved and restored when going to deferred code.
    // The result is in elements, which is guaranteed non-shared.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(elements.reg(),
                                           receiver.reg(),
                                           key.reg());

    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the receiver has the expected map.
    // Initially, use an invalid map.  The map is patched in the IC
    // initialization code.
    __ bind(deferred->patch_site());
    // Use masm-> here instead of the double underscore macro since extra
    // coverage code can interfere with the patching.
    masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
               Immediate(FACTORY->null_value()));
    deferred->Branch(not_equal);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Get the elements array from the receiver.
    __ mov(elements.reg(),
           FieldOperand(receiver.reg(), JSObject::kElementsOffset));
    __ AssertFastElements(elements.reg());

    // Check that the key is within bounds.
    __ cmp(key.reg(),
           FieldOperand(elements.reg(), FixedArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Load and check that the result is not the hole.
    // Key holds a smi.
    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
    __ mov(elements.reg(),
           FieldOperand(elements.reg(),
                        key.reg(),
                        times_2,
                        FixedArray::kHeaderSize));
    result = elements;
    __ cmp(Operand(result.reg()), Immediate(FACTORY->the_hole_value()));
    deferred->Branch(equal);
    __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline(), 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call.  A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load.  The explicit nop instruction is here because
    // the push that follows might be peep-hole optimized away.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}
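
// The times_2 scale in the element load above is not arbitrary: with
// kSmiTag == 0 and kSmiTagSize == 1 a smi is its value shifted left by one,
// so the smi key doubles as a ready-made index scaled for 4-byte elements,
// and the unsigned above_equal bounds check against the (smi) length
// rejects negative keys for free.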


Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    Comment cmnt(masm(), "[ Inlined store to keyed Property");

    // Get the receiver, key and value into registers.
    result = frame()->Pop();
    Result key = frame()->Pop();
    Result receiver = frame()->Pop();

    Result tmp = allocator_->Allocate();
    ASSERT(tmp.is_valid());
    Result tmp2 = allocator_->Allocate();
    ASSERT(tmp2.is_valid());

    // Determine whether the value is a constant before putting it in a
    // register.
    bool value_is_constant = result.is_constant();

    // Make sure that value, key and receiver are in registers.
    result.ToRegister();
    key.ToRegister();
    receiver.ToRegister();

    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(result.reg(),
                                           key.reg(),
                                           receiver.reg(),
                                           tmp.reg(),
                                           strict_mode_flag());

    // Check that the receiver is not a smi.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Check that the receiver is a JSArray.
    __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
    deferred->Branch(not_equal);

    // Get the elements array from the receiver and check that it is not a
    // dictionary.
    __ mov(tmp.reg(),
           FieldOperand(receiver.reg(), JSArray::kElementsOffset));

    // Check whether it is possible to omit the write barrier.  If the
    // elements array is in new space or the value written is a smi we can
    // safely update the elements array without write barrier.
    Label in_new_space;
    __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
    if (!value_is_constant) {
      __ test(result.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    }

    __ bind(&in_new_space);
    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison.  When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(FACTORY->fixed_array_map()));
    deferred->Branch(not_equal);

    // Check that the key is within bounds.  Both the key and the length of
    // the JSArray are smis (because the fixed array check above ensures the
    // elements are in fast case).  Use unsigned comparison to handle
    // negative keys.
    __ cmp(key.reg(),
           FieldOperand(receiver.reg(), JSArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Store the value.
    __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
    __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline(), 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC(strict_mode_flag());
    // Make sure that we do not have a test instruction after the
    // call.  A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 3);
  return result;
}
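
// The barrier-omission logic above: a write barrier is only needed when a
// new-space pointer is written into an old-space object.  If the elements
// array itself is in new space, or the stored value is a smi (checked on
// the fall-through path when the value is not a compile-time constant), the
// inlined store can skip the barrier; a non-smi value headed for an
// old-space elements array bails out to the deferred IC call instead.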


#undef __
#define __ ACCESS_MASM(masm)


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>::cast(raw_name->handle());
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) cgen_->frame()->Dup();
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::TakeValue() {
  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot.  For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can
  // reach here.  Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


#undef __

#define __ masm.


static void MemCopyWrapper(void* dest, const void* src, size_t size) {
  memcpy(dest, src, size);
}


MemCopyFunction CreateMemCopyFunction() {
  HandleScope scope;
  MacroAssembler masm(NULL, 1 * KB);

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap.  We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  int stack_offset = 0;  // Update if we change the stack height.

  if (FLAG_debug_code) {
    __ cmp(Operand(esp, kSizeOffset + stack_offset),
           Immediate(kMinComplexMemCopy));
    Label ok;
    __ j(greater_equal, &ok);
    __ int3();
    __ bind(&ok);
  }
  if (masm.isolate()->cpu_features()->IsSupported(SSE2)) {
    CpuFeatures::Scope enable(SSE2);
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(Operand(edx), Immediate(16));
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
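    // The block above copies the first 16 bytes unaligned, then advances
    // dst and src and shrinks count by edx = 16 - (dst & 15).  Any bytes
    // skipped by the advance were already covered by that first unaligned
    // copy, so dst is now 16-byte aligned with no byte left behind.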

    // edi is now aligned. Check if esi is also aligned.
    Label unaligned_source;
    __ test(Operand(src), Immediate(0x0F));
    __ j(not_zero, &unaligned_source);
    {
      __ IncrementCounter(masm.isolate()->counters()->memcopy_aligned(), 1);
      // Copy loop for aligned source and destination.
      __ mov(edx, count);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqa(xmm0, Operand(src, 0x00));
        __ movdqa(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqa(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0xF);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
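      // The two movdqu instructions above implement the overlapping-tail
      // trick: rather than a byte loop for the final count & 0xF bytes, the
      // last 16 bytes of the region are re-copied, possibly re-writing
      // bytes the loop already produced.  This cannot reach back before the
      // start of the buffers because the copy size is at least
      // kMinComplexMemCopy (checked at entry in debug builds).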

      __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }
    __ Align(16);
    {
      // Copy loop for unaligned source and aligned destination.
      // If source is not aligned, we can't read it as efficiently.
      __ bind(&unaligned_source);
      __ IncrementCounter(masm.isolate()->counters()->memcopy_unaligned(), 1);
      __ mov(edx, ecx);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqu(xmm0, Operand(src, 0x00));
        __ movdqu(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqu(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0x0F);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);

      __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }

  } else {
    __ IncrementCounter(masm.isolate()->counters()->memcopy_noxmm(), 1);
    // SSE2 not supported. Unlikely to happen in practice.
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    __ cld();
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    // Copy the first word.
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);

    // Increment src and dst so that dst is aligned.
    __ mov(edx, dst);
    __ and_(edx, 0x03);
    __ neg(edx);
    __ add(Operand(edx), Immediate(4));  // edx = 4 - (dst & 3)
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
    // edi is now aligned, ecx holds number of remaining bytes to copy.

    __ mov(edx, count);
    count = edx;
    __ shr(ecx, 2);  // Make word count instead of byte count.
    __ rep_movs();

    // At most 3 bytes left to copy. Copy 4 bytes at end of string.
    __ and_(count, 3);
    __ mov(eax, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, count, times_1, -4), eax);

    __ mov(eax, Operand(esp, stack_offset + kDestinationOffset));
    __ pop(esi);
    __ pop(edi);
    __ ret(0);
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  ASSERT(desc.reloc_size == 0);

  // Copy the generated code into an executable chunk and return a pointer
  // to the first instruction in it as a C++ function pointer.
  LargeObjectChunk* chunk = LargeObjectChunk::New(desc.instr_size, EXECUTABLE);
  if (chunk == NULL) return &MemCopyWrapper;
  memcpy(chunk->GetStartAddress(), desc.buffer, desc.instr_size);
  CPU::FlushICache(chunk->GetStartAddress(), desc.instr_size);
  return FUNCTION_CAST<MemCopyFunction>(chunk->GetStartAddress());
}
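
// A hypothetical caller would create the function once and then invoke the
// returned pointer like plain memcpy, falling back transparently to
// MemCopyWrapper when no executable chunk could be allocated:
//
//   static MemCopyFunction memcopy_function = CreateMemCopyFunction();
//   memcopy_function(dest, src, size);  // The generated code assumes
//                                       // size >= kMinComplexMemCopy
//                                       // (enforced only in debug builds).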

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32