// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "codegen-inl.h"
#include "bootstrapper.h"
#include "code-stubs.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

// -------------------------------------------------------------------------
// Platform-specific FrameRegisterState functions.

void FrameRegisterState::Save(MacroAssembler* masm) const {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void FrameRegisterState::Restore(MacroAssembler* masm) const {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}
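// A note on the encoding consumed by Save() and Restore() above (a reading
// of the two functions, spelled out): for each register, registers_[i] is
// either kPush (the value is kept on the machine stack across the call,
// pushed here and popped in reverse order in Restore()), kIgnore (nothing
// to preserve), or an ebp-relative frame offset, possibly tagged with
// kSyncedFlag when the frame slot already holds the value, in which case
// Save() can skip the store.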

#undef __
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  frame_state_.Save(masm_);
}


void DeferredCode::RestoreRegisters() {
  frame_state_.Restore(masm_);
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->Save(masm);
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  frame_state_->Restore(masm);
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}

// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}
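// CodeGenState objects form a stack threaded through previous_: the
// constructors install the new state as the owner's current state, and the
// destructor restores the previous one. A typical scoped use (taken from
// LoadCondition() below):
//
//   { CodeGenState new_state(this, dest);
//     Visit(expr);
//   }  // owner's state reverts here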

// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false),
      jit_cookie_((FLAG_mask_constants_with_cookie) ? V8::Random() : 0) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context
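//
// At entry esp points at the return address, with the arguments and the
// receiver above it (an illustrative layout sketch of the Entry comment in
// Generate() below; higher addresses are on the left):
//
//   [ receiver ] [ arg 0 ] ... [ arg n-1 ] [ return address ] <- esp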

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  JumpTarget::set_compiling_deferred_code(false);

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ int3();
    }
#endif

    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();

    // Allocate the local context if needed.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and esi agree.
      if (FLAG_debug_code) {
        __ cmp(context.reg(), Operand(esi));
        __ Assert(equal, "Runtime::NewContext should end up in esi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If the same parameter
      // name occurs twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, the value that ends up in the
      // context must be the one passed for its last occurrence. This is a
      // rare case, so we don't check for it; instead we rely on the
      // copying order: such a parameter is copied repeatedly into the
      // same context location, and thus the last value is what is seen
      // inside the function.
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope()->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ mov(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object. This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->Push(Factory::the_hole_value());
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->is_in_loop() ? 1 : 0);
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // do not overwrite context register
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we
      // would always be at a function context. However it is safe to
      // dereference, because the function context of a function context
      // is itself. Before deleting this mov we should try to create a
      // counter-example first, though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}
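// Illustration of the chain walk above (hypothetical JS snippet): in
//
//   function outer(a) { function inner() { return a; } }
//
// a read of 'a' inside inner() has a context chain length of 1, so one
// loop iteration loads inner's closure and then that closure's context
// before the slot is addressed.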


Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame. If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}


void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    __ add(value->reg(), Operand(value->reg()));
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}
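// The conversion above relies on the smi encoding: kSmiTag == 0 with a
// 31-bit payload (see the STATIC_ASSERT(kSmiValueSize == 31) below), so
// tagging an int32 is a one-bit left shift, and "add reg, reg" is exactly
// that shift. Worked example: the untagged value 5 (0b101) becomes 0b1010,
// the smi 5. GuaranteedSmiResult() ensures the value fits in 31 bits, so
// the addition cannot overflow here.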


void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number
    // disagree.
    __ xor_(val, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU.
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    if (!CpuFeatures::IsSupported(SSE2)) {
      // Pop the value from the floating point stack.
      __ fstp(0);
    }
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}
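// The recovery logic above, spelled out: "add val, val" computes 2*val,
// and it overflows exactly when bits 30 and 31 of the original value
// differ. In that case "sar val, 1" restores bits 0..30 but leaves the
// sign bit equal to bit 30, so xoring with 0x80000000 flips it back.
// Worked example: val = 0x40000000; the add gives 0x80000000 (overflow),
// the sar gives 0xc0000000, and the xor restores 0x40000000, which is
// then boxed in a heap number.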


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int computation,
  // compile that SafeInt32 path, and a bailout path.
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      CpuFeatures::IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;
    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(Factory::false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(Factory::true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(Factory::true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(Factory::false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (e.g., the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(Factory::true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(Factory::false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}
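// The three branches above materialize a boolean value out of control
// flow. For an expression like 'a < b' loaded for value (an illustrative
// input), LoadCondition() typically leaves the result split between
// true_target and false_target; the code above binds each pending target
// and pushes true_value or false_value so that every path rejoins at
// 'loaded' with exactly one value on the frame.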


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObject());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObject());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObject());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}
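// The special casing matters for JS like 'typeof undeclared_name' (an
// illustrative example): a normal contextual lookup of an unresolvable
// name throws a ReferenceError, while typeof must yield "undefined",
// which the property-style load above makes possible.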


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
  return frame_->Pop();
}
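// The hole sentinel and the probe above exist for code such as
//
//   function f() { var arguments = 42; return arguments; }  // hypothetical
//
// where an assignment to a local named 'arguments' lands in the slot
// before the arguments object is ever materialized; the probe then finds
// a non-hole value and skips the store.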

// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code. Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it. Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it. Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_integer32()) {  // Also takes Smi case.
    Comment cmnt(masm_, "ONLY_INTEGER_32");
    if (FLAG_debug_code) {
      Label ok;
      __ AbortIfNotNumber(value.reg());
      __ test(value.reg(), Immediate(kSmiTagMask));
      __ j(zero, &ok);
      __ fldz();
      __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
      __ FCmp();
      __ j(not_zero, &ok);
      __ Abort("Smi was wrapped in HeapNumber in output from bitop");
      __ bind(&ok);
    }
    // In the integer32 case there are no Smis hidden in heap numbers, so we
    // need only test for Smi zero.
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    value.Unuse();
    dest->Split(not_zero);
  } else if (value.is_number()) {
    Comment cmnt(masm_, "ONLY_NUMBER");
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);
    __ fldz();
    __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
    __ FCmp();
    value.Unuse();
    dest->Split(not_zero);
  } else {
    // Fast case checks.
    // 'false' => false.
    __ cmp(value.reg(), Factory::false_value());
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ cmp(value.reg(), Factory::true_value());
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ cmp(value.reg(), Factory::undefined_value());
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ test(temp.reg(), Operand(temp.reg()));
    temp.Unuse();
    dest->Split(not_equal);
  }
}
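// Note on the "Smi => false iff zero" tests above: since kSmiTag == 0, the
// smi 0 is the all-zero word, so a single "test reg, reg" rejects it
// before the kSmiTagMask test separates the remaining smis (all nonzero,
// hence truthy) from heap objects.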


// Perform or call the specialized stub for a binary operation. Requires the
// three registers left, right and dst to be distinct and spilled. This
// deferred operation has up to three entry points: The main one calls the
// runtime system. The second is for when the result is a non-Smi. The
// third is for when at least one of the inputs is non-Smi and we have SSE2.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
    ASSERT(!left.is(right));
  }

  virtual void Generate();

  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit().
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToAnswerOutOfRange(Condition cond);
  void JumpToConstantRhs(Condition cond, Smi* smi_value);
  Label* NonSmiInputLabel();

 private:
  void GenerateAnswerOutOfRange();
  void GenerateNonSmiInput();

  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
  Label answer_out_of_range_;
  Label non_smi_input_;
  Label constant_rhs_;
  Smi* smi_value_;
};


Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
  if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) {
    return &non_smi_input_;
  } else {
    return entry_label();
  }
}


void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) {
  __ j(cond, &answer_out_of_range_);
}


void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
                                                      Smi* smi_value) {
  smi_value_ = smi_value;
  __ j(cond, &constant_rhs_);
}


void DeferredInlineBinaryOperation::Generate() {
  // Registers are not saved implicitly for this stub, so we should not
  // tread on the registers that were not passed to us.
  if (CpuFeatures::IsSupported(SSE2) &&
      ((op_ == Token::ADD) ||
       (op_ == Token::SUB) ||
       (op_ == Token::MUL) ||
       (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    Exit();

    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  // Register spilling is not done implicitly for this stub.
  // We can't postpone it any more now though.
  SaveRegisters();

  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  RestoreRegisters();
  Exit();

  if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) {
    GenerateNonSmiInput();
  }
  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


void DeferredInlineBinaryOperation::GenerateNonSmiInput() {
  // We know at least one of the inputs was not a Smi.
  // This is a third entry point into the deferred code.
  // We may not overwrite left_ because we want to be able
  // to call the handling code for non-smi answer and it
  // might want to overwrite the heap number in left_.
  ASSERT(!right_.is(dst_));
  ASSERT(!left_.is(dst_));
  ASSERT(!left_.is(right_));
  // This entry point is used for bit ops where the right hand side
  // is a constant Smi and the left hand side is a heap object. It
  // is also used for bit ops where both sides are unknown, but where
  // at least one of them is a heap object.
  bool rhs_is_constant = constant_rhs_.is_linked();
  // We can't generate code for both cases.
  ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked());

  if (FLAG_debug_code) {
    __ int3();  // We don't fall through into this code.
  }

  __ bind(&non_smi_input_);

  if (rhs_is_constant) {
    __ bind(&constant_rhs_);
    // In this case the input is a heap object and it is in the dst_ register.
    // The left_ and right_ registers have not been initialized yet.
    __ mov(right_, Immediate(smi_value_));
    __ mov(left_, Operand(dst_));
    if (!CpuFeatures::IsSupported(SSE2)) {
      __ jmp(entry_label());
      return;
    } else {
      CpuFeatures::Scope use_sse2(SSE2);
      __ JumpIfNotNumber(dst_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    }
  } else {
    // We know we have SSE2 here because otherwise the label is not linked (see
    // NonSmiInputLabel).
    CpuFeatures::Scope use_sse2(SSE2);
    // Handle the non-constant right hand side situation:
    if (left_info_.IsSmi()) {
      // Right is a heap object.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, dst_, right_info_, entry_label());
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_);
    } else if (right_info_.IsSmi()) {
      // Left is a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    } else {
      // Here we don't know if it's one or both that is a heap object.
      Label only_right_is_heap_object, got_both;
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_, &only_right_is_heap_object);
      // Left was a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_, &got_both);
      // Both were heap objects.
      __ rcl(right_, 1);  // Put tag back.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ jmp(&got_both);
      __ bind(&only_right_is_heap_object);
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ bind(&got_both);
    }
  }
  ASSERT(op_ == Token::BIT_AND ||
         op_ == Token::BIT_OR ||
         op_ == Token::BIT_XOR ||
         right_.is(ecx));
  switch (op_) {
    case Token::BIT_AND: __ and_(dst_, Operand(right_)); break;
    case Token::BIT_OR: __ or_(dst_, Operand(right_)); break;
    case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break;
    case Token::SHR: __ shr_cl(dst_); break;
    case Token::SAR: __ sar_cl(dst_); break;
    case Token::SHL: __ shl_cl(dst_); break;
    default: UNREACHABLE();
  }
  if (op_ == Token::SHR) {
    // Check that the *unsigned* result fits in a smi. Neither of
    // the two high-order bits can be set:
    //  * 0x80000000: high bit would be lost when smi tagging.
    //  * 0x40000000: this number would convert to negative when smi
    //    tagging.
    __ test(dst_, Immediate(0xc0000000));
    __ j(not_zero, &answer_out_of_range_);
  } else {
    // Check that the *signed* result fits in a smi.
    __ cmp(dst_, 0xc0000000);
    __ j(negative, &answer_out_of_range_);
  }
  __ SmiTag(dst_);
  Exit();
}
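// Why 0xc0000000 in the two range checks above: a smi payload has 31 bits,
// so a signed result fits iff it lies in [-0x40000000, 0x3fffffff]. The
// signed check "cmp dst, 0xc0000000" computes dst + 0x40000000 (mod 2^32),
// whose sign bit is clear exactly for in-range values. The unsigned SHR
// check instead rejects any result with bit 30 or 31 set (mask
// 0xc0000000), matching the two bullet points in the code.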


void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() {
  Label after_alloc_failure2;
  Label allocation_ok;
  __ bind(&after_alloc_failure2);
  // We have to allocate a number, causing a GC, while keeping hold of
  // the answer in dst_. The answer is not a Smi. We can't just call the
  // runtime shift function here because we already threw away the inputs.
  __ xor_(left_, Operand(left_));
  __ shl(dst_, 1);  // Put top bit in carry flag and Smi tag the low bits.
  __ rcr(left_, 1);  // Rotate with carry.
  __ push(dst_);  // Smi tagged low 31 bits.
  __ push(left_);  // 0 or 0x80000000, which is Smi tagged in both cases.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  if (!left_.is(eax)) {
    __ mov(left_, eax);
  }
  __ pop(right_);  // High bit.
  __ pop(dst_);  // Low 31 bits.
  __ shr(dst_, 1);  // Put 0 in top bit.
  __ or_(dst_, Operand(right_));
  __ jmp(&allocation_ok);

  // This is the second entry point to the deferred code. It is used only by
  // the bit operations.
  // The dst_ register has the answer. It is not Smi tagged. If mode_ is
  // OVERWRITE_LEFT then left_ must contain either an overwritable heap number
  // or a Smi.
  // Put a heap number pointer in left_.
  __ bind(&answer_out_of_range_);
  SaveRegisters();
  if (mode_ == OVERWRITE_LEFT) {
    __ test(left_, Immediate(kSmiTagMask));
    __ j(not_zero, &allocation_ok);
  }
  // This trashes right_.
  __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
  __ bind(&allocation_ok);
  if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) {
    CpuFeatures::Scope use_sse2(SSE2);
    ASSERT(Token::IsBitOp(op_));
    // Signed conversion.
    __ cvtsi2sd(xmm0, Operand(dst_));
    __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0);
  } else {
    if (op_ == Token::SHR) {
      __ push(Immediate(0));  // High word of unsigned value.
      __ push(dst_);
      __ fild_d(Operand(esp, 0));
      __ Drop(2);
    } else {
      ASSERT(Token::IsBitOp(op_));
      __ push(dst_);
      __ fild_s(Operand(esp, 0));  // Signed conversion.
      __ pop(dst_);
    }
    __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset));
  }
  __ mov(dst_, left_);
  RestoreRegisters();
  Exit();
}
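// The shl/rcr sequence above splits the untagged 32-bit answer into two
// GC-safe smis across the allocating runtime call: dst_ keeps the low 31
// bits (smi-tagged by the shl) and left_ receives the top bit as either 0
// or 0x80000000, both valid smis. After the call, shr and or_ reassemble
// the original bits. Worked example: 0xdeadbeef is saved as 0xbd5b7dde
// plus 0x80000000 and restored bit-for-bit afterwards.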


static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
                                  Token::Value op,
                                  const Result& right,
                                  const Result& left) {
  // Set TypeInfo of result according to the operation performed.
  // Rely on the fact that smis have a 31 bit payload on ia32.
  STATIC_ASSERT(kSmiValueSize == 31);
  switch (op) {
    case Token::COMMA:
      return right.type_info();
    case Token::OR:
    case Token::AND:
      // Result type can be either of the two input types.
      return operands_type;
    case Token::BIT_AND: {
      // Anding with positive Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() >= 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
                 Smi::cast(*left.handle())->value() >= 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_OR: {
      // Oring with negative Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() < 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
                 Smi::cast(*left.handle())->value() < 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_XOR:
      // Result is always a 32 bit integer. Smi property of inputs is
      // preserved.
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SAR:
      if (left.is_smi()) return TypeInfo::Smi();
      // Result is a smi if we shift by a constant >= 1, otherwise an
      // integer32. Shift amount is masked with 0x1F (ECMA standard 11.7.2).
      return (right.is_constant() && right.handle()->IsSmi()
              && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SHR:
      // Result is a smi if we shift by a constant >= 2, an integer32 if
      // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
      if (right.is_constant() && right.handle()->IsSmi()) {
        int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
        if (shift_amount > 1) {
          return TypeInfo::Smi();
        } else if (shift_amount > 0) {
          return TypeInfo::Integer32();
        }
      }
      return TypeInfo::Number();
    case Token::ADD:
      if (operands_type.IsSmi()) {
        // The Integer32 range is big enough to take the sum of any two Smis.
        return TypeInfo::Integer32();
      } else if (operands_type.IsNumber()) {
        return TypeInfo::Number();
      } else if (left.type_info().IsString() || right.type_info().IsString()) {
        return TypeInfo::String();
      } else {
        return TypeInfo::Unknown();
      }
    case Token::SHL:
      return TypeInfo::Integer32();
    case Token::SUB:
      // The Integer32 range is big enough to take the difference of any two
      // Smis.
      return (operands_type.IsSmi()) ?
             TypeInfo::Integer32() :
             TypeInfo::Number();
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      // Result is always a number.
      return TypeInfo::Number();
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return TypeInfo::Unknown();
}
1379
1380
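// Note on the SHR case in CalculateTypeInfo above: an unsigned shift by
// two or more leaves at most 30 significant bits, which always fits the
// 31-bit smi payload; a shift by 1 still fits a signed 32-bit integer,
// while a shift by 0 can produce values up to 2^32 - 1, which only fit
// in a heap number.
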
void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          StringAddStub stub(NO_STRING_CHECK_LEFT_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        }
      } else if (right_is_string) {
        StringAddStub stub(NO_STRING_CHECK_RIGHT_IN_STUB);
        answer = frame_->CallStub(&stub, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}


Result CodeGenerator::GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub,
                                                      Result* left,
                                                      Result* right) {
  if (stub->ArgsInRegistersSupported()) {
    stub->SetArgsInRegisters();
    return frame_->CallStub(stub, left, right);
  } else {
    frame_->Push(left);
    frame_->Push(right);
    return frame_->CallStub(stub, 2);
  }
}


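// Note on the helper above: when GenericBinaryOpStub supports register
// arguments, SetArgsInRegisters() lets the operands travel in fixed
// registers chosen by the stub's calling convention (left in edx, right
// in eax on this port) instead of being pushed, saving two stack slots
// and the corresponding loads inside the stub.
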
bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
        if (answer != 0 || (left >= 0 && right >= 0)) {
          answer_object = Smi::FromInt(static_cast<int>(answer));
        }
      }
    }
    break;
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
      int shift_amount = right & 0x1F;
      if (Smi::IsValid(left << shift_amount)) {
        answer_object = Smi::FromInt(left << shift_amount);
      }
      break;
    }
    case Token::SHR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      unsigned_left >>= shift_amount;
      if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
        answer_object = Smi::FromInt(unsigned_left);
      }
      break;
    }
    case Token::SAR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      if (left < 0) {
        // Perform an arithmetic shift of a negative number by
        // complementing it, shifting logically, and complementing again.
        unsigned_left = ~unsigned_left;
        unsigned_left >>= shift_amount;
        unsigned_left = ~unsigned_left;
      } else {
        unsigned_left >>= shift_amount;
      }
      ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
      answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object == Heap::undefined_value()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}


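// Worked example for the MUL case above: -3 * 0 must evaluate to -0.0
// per the spec, and -0.0 is not a smi, so answer_object stays undefined
// and FoldConstantSmis() returns false, deferring to the generic code;
// 2 * 3, by contrast, folds directly to Smi::FromInt(6) at compile time.
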
void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
                                               Result* right,
                                               JumpTarget* both_smi) {
  TypeInfo left_info = left->type_info();
  TypeInfo right_info = right->type_info();
  if (left_info.IsDouble() || left_info.IsString() ||
      right_info.IsDouble() || right_info.IsString()) {
    // We know that left and right are not both smi. Don't do any tests.
    return;
  }

  if (left->reg().is(right->reg())) {
    if (!left_info.IsSmi()) {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), left->reg());
      __ or_(temp.reg(), Operand(right->reg()));
      __ test(temp.reg(), Immediate(kSmiTagMask));
      temp.Unuse();
      both_smi->Branch(zero);
    } else {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    if (!right_info.IsSmi()) {
      __ test(right->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  }
}


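// The or/test sequence above exploits the smi tag being the low bit 0:
// or-ing the two words leaves the low bit set iff at least one operand
// has its tag bit set, so a single test against kSmiTagMask checks both
// values at once.  E.g. tagged 5 (0xa) | tagged 3 (0x6) = 0xe -- the low
// bit is clear, so both are smis.
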
void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  DeferredCode* deferred) {
  JumpIfNotBothSmiUsingTypeInfo(left,
                                right,
                                scratch,
                                left_info,
                                right_info,
                                deferred->entry_label());
}


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  Label* on_not_smi) {
  if (left.is(right)) {
    if (!left_info.IsSmi()) {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left);
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      __ mov(scratch, left);
      __ or_(scratch, Operand(right));
      __ test(scratch, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left);
    if (!right_info.IsSmi()) {
      __ test(right, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  }
}


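// Note on the overloads above: when the recorded type info already
// proves an operand to be a smi, the dynamic tag check is omitted
// entirely; debug builds still verify the claim with AbortIfNotSmi,
// which surfaces stale or wrong type information as an immediate abort
// rather than a silent miscompile.
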
// Implements a binary operation using a deferred code object and some
// inline code to operate on smis quickly.
Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
                                               Result* left,
                                               Result* right,
                                               OverwriteMode overwrite_mode) {
  // Copy the type info because left and right may be overwritten.
  TypeInfo left_type_info = left->type_info();
  TypeInfo right_type_info = right->type_info();
  Token::Value op = expr->op();
  Result answer;
  // Special handling of div and mod because they use fixed registers.
  if (op == Token::DIV || op == Token::MOD) {
    // We need eax as the quotient register, edx as the remainder
    // register, neither left nor right in eax or edx, and left copied
    // to eax.
    Result quotient;
    Result remainder;
    bool left_is_in_eax = false;
    // Step 1: get eax for quotient.
    if ((left->is_register() && left->reg().is(eax)) ||
        (right->is_register() && right->reg().is(eax))) {
      // One or both is in eax. Use a fresh non-edx register for
      // them.
      Result fresh = allocator_->Allocate();
      ASSERT(fresh.is_valid());
      if (fresh.reg().is(edx)) {
        remainder = fresh;
        fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
      }
      if (left->is_register() && left->reg().is(eax)) {
        quotient = *left;
        *left = fresh;
        left_is_in_eax = true;
      }
      if (right->is_register() && right->reg().is(eax)) {
        quotient = *right;
        *right = fresh;
      }
      __ mov(fresh.reg(), eax);
    } else {
      // Neither left nor right is in eax.
      quotient = allocator_->Allocate(eax);
    }
    ASSERT(quotient.is_register() && quotient.reg().is(eax));
    ASSERT(!(left->is_register() && left->reg().is(eax)));
    ASSERT(!(right->is_register() && right->reg().is(eax)));

    // Step 2: get edx for remainder if necessary.
    if (!remainder.is_valid()) {
      if ((left->is_register() && left->reg().is(edx)) ||
          (right->is_register() && right->reg().is(edx))) {
        Result fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
        if (left->is_register() && left->reg().is(edx)) {
          remainder = *left;
          *left = fresh;
        }
        if (right->is_register() && right->reg().is(edx)) {
          remainder = *right;
          *right = fresh;
        }
        __ mov(fresh.reg(), edx);
      } else {
        // Neither left nor right is in edx.
        remainder = allocator_->Allocate(edx);
      }
    }
    ASSERT(remainder.is_register() && remainder.reg().is(edx));
    ASSERT(!(left->is_register() && left->reg().is(edx)));
    ASSERT(!(right->is_register() && right->reg().is(edx)));

    left->ToRegister();
    right->ToRegister();
    frame_->Spill(eax);
    frame_->Spill(edx);
    // DeferredInlineBinaryOperation requires all the registers that it is
    // told about to be spilled and distinct.
    Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);

    // Check that left and right are smi tagged.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          (op == Token::DIV) ? eax : edx,
                                          left->reg(),
                                          distinct_right.reg(),
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), edx,
                                  left_type_info, right_type_info, deferred);
    if (!left_is_in_eax) {
      __ mov(eax, left->reg());
    }
    // Sign extend eax into edx:eax.
    __ cdq();
    // Check for 0 divisor.
    __ test(right->reg(), Operand(right->reg()));
    deferred->Branch(zero);
    // Divide edx:eax by the right operand.
    __ idiv(right->reg());

    // Complete the operation.
    if (op == Token::DIV) {
      // Check for negative zero result. If result is zero, and divisor
      // is negative, return a floating point negative zero. The
      // virtual frame is unchanged in this block, so local control flow
      // can use a Label rather than a JumpTarget. If the context of this
      // expression will treat -0 like 0, do not do this test.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(left->reg(), Operand(left->reg()));
        __ j(not_zero, &non_zero_result);
        __ test(right->reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by
      // the idiv instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      deferred->Branch(equal);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      deferred->Branch(not_zero);
      // Tag the result and store it in the quotient register.
      __ SmiTag(eax);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = quotient;
    } else {
      ASSERT(op == Token::MOD);
      // Check for a negative zero result. If the result is zero, and
      // the dividend is negative, return a floating point negative
      // zero. The frame is unchanged in this block, so local control
      // flow can use a Label rather than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(edx, Operand(edx));
        __ j(not_zero, &non_zero_result, taken);
        __ test(left->reg(), Operand(left->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = remainder;
    }
    ASSERT(answer.is_valid());
    return answer;
  }

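  // Worked example for the corner case above: the most negative smi is
  // -2^30 = -1073741824, and -1073741824 / -1 = 1073741824 = 0x40000000,
  // one past the largest smi (2^30 - 1).  idiv leaves exactly 0x40000000
  // in eax in that case, so the cmp/branch routes it to the deferred code.
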
  // Special handling of shift operations because they use fixed
  // registers.
  if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
    // Move left out of ecx if necessary.
    if (left->is_register() && left->reg().is(ecx)) {
      *left = allocator_->Allocate();
      ASSERT(left->is_valid());
      __ mov(left->reg(), ecx);
    }
    right->ToRegister(ecx);
    left->ToRegister();
    ASSERT(left->is_register() && !left->reg().is(ecx));
    ASSERT(right->is_register() && right->reg().is(ecx));
    if (left_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    }
    if (right_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
    }

    // We will modify right, it must be spilled.
    frame_->Spill(ecx);
    // DeferredInlineBinaryOperation requires all the registers that it is told
    // about to be spilled and distinct. We know that right is ecx and left is
    // not ecx.
    frame_->Spill(left->reg());

    // Use a fresh answer register to avoid spilling the left operand.
    answer = allocator_->Allocate();
    ASSERT(answer.is_valid());

    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          answer.reg(),
                                          left->reg(),
                                          ecx,
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());

    // Untag both operands.
    __ mov(answer.reg(), left->reg());
    __ SmiUntag(answer.reg());
    __ SmiUntag(right->reg());  // Right is ecx.

    // Perform the operation.
    ASSERT(right->reg().is(ecx));
    switch (op) {
      case Token::SAR: {
        __ sar_cl(answer.reg());
        if (!left_type_info.IsSmi()) {
          // Check that the *signed* result fits in a smi.
          __ cmp(answer.reg(), 0xc0000000);
          deferred->JumpToAnswerOutOfRange(negative);
        }
        break;
      }
      case Token::SHR: {
        __ shr_cl(answer.reg());
        // Check that the *unsigned* result fits in a smi. Neither of
        // the two high-order bits can be set:
        // * 0x80000000: high bit would be lost when smi tagging.
        // * 0x40000000: this number would convert to negative when smi
        // tagging.
        // These two cases can only happen with shifts by 0 or 1 when
        // handed a valid smi. If the answer cannot be represented by a
        // smi, restore the left and right arguments, and jump to slow
        // case. The low bit of the left argument may be lost, but only
        // in a case where it is dropped anyway.
        __ test(answer.reg(), Immediate(0xc0000000));
        deferred->JumpToAnswerOutOfRange(not_zero);
        break;
      }
      case Token::SHL: {
        __ shl_cl(answer.reg());
        // Check that the *signed* result fits in a smi.
        __ cmp(answer.reg(), 0xc0000000);
        deferred->JumpToAnswerOutOfRange(negative);
        break;
      }
      default:
        UNREACHABLE();
    }
    // Smi-tag the result in answer.
    __ SmiTag(answer.reg());
    deferred->BindExit();
    left->Unuse();
    right->Unuse();
    ASSERT(answer.is_valid());
    return answer;
  }

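  // Why the 0xc0000000 comparisons above work: a 32-bit value fits the
  // 31-bit smi payload exactly when its top two bits are equal (00 for
  // non-negative, 11 for negative values).  Subtracting 0xc0000000 sets
  // the sign flag precisely for the bit patterns 01... and 10..., i.e.
  // for untagged results in [0x40000000, 0xbfffffff], which are exactly
  // the values out of smi range.
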
  // Handle the other binary operations.
  left->ToRegister();
  right->ToRegister();
  // DeferredInlineBinaryOperation requires all the registers that it is told
  // about to be spilled.
  Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);
  // A newly allocated register answer is used to hold the answer. The
  // registers containing left and right are not modified so they don't
  // need to be spilled in the fast case.
  answer = allocator_->Allocate();
  ASSERT(answer.is_valid());

  // Perform the smi tag check.
  DeferredInlineBinaryOperation* deferred =
      new DeferredInlineBinaryOperation(op,
                                        answer.reg(),
                                        left->reg(),
                                        distinct_right.reg(),
                                        left_type_info,
                                        right_type_info,
                                        overwrite_mode);
  Label non_smi_bit_op;
  if (op != Token::BIT_OR) {
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());
  }

  __ mov(answer.reg(), left->reg());
  switch (op) {
    case Token::ADD:
      __ add(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::SUB:
      __ sub(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::MUL: {
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // Remove smi tag from the left operand (but keep sign).
      // Left-hand operand has been copied into answer.
      __ SmiUntag(answer.reg());
      // Do multiplication of smis, leaving result in answer.
      __ imul(answer.reg(), Operand(right->reg()));
      // Go slow on overflows.
      deferred->Branch(overflow);
      // Check for negative zero result. If product is zero, and one
      // argument is negative, go to slow case. The frame is unchanged
      // in this block, so local control flow can use a Label rather
      // than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(answer.reg(), Operand(answer.reg()));
        __ j(not_zero, &non_zero_result, taken);
        __ mov(answer.reg(), left->reg());
        __ or_(answer.reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ xor_(answer.reg(), Operand(answer.reg()));  // Positive 0 is correct.
        __ bind(&non_zero_result);
      }
      break;
    }

    case Token::BIT_OR:
      __ or_(answer.reg(), Operand(right->reg()));
      __ test(answer.reg(), Immediate(kSmiTagMask));
      __ j(not_zero, deferred->NonSmiInputLabel());
      break;

    case Token::BIT_AND:
      __ and_(answer.reg(), Operand(right->reg()));
      break;

    case Token::BIT_XOR:
      __ xor_(answer.reg(), Operand(right->reg()));
      break;

    default:
      UNREACHABLE();
      break;
  }

  deferred->BindExit();
  left->Unuse();
  right->Unuse();
  ASSERT(answer.is_valid());
  return answer;
}


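// Smi representation notes for the fast paths above and the deferred
// classes below: with tag bit 0 a tagged smi is simply 2 * value, so ADD
// and SUB work on tagged values directly (2a + 2b == 2(a + b)) and only
// need an overflow check, while MUL must untag one operand first because
// 2a * 2b == 4ab.  E.g. tagged 2 (0x4) + tagged 3 (0x6) = 0xa = tagged 5.
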
// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             TypeInfo type_info,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperation::Generate() {
  // For mod we don't generate all the Smi code inline.
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, src_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// Call the appropriate binary operation stub to compute value op src
// and leave the result in dst.
class DeferredInlineSmiOperationReversed: public DeferredCode {
 public:
  DeferredInlineSmiOperationReversed(Token::Value op,
                                     Register dst,
                                     Smi* value,
                                     Register src,
                                     TypeInfo type_info,
                                     OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        type_info_(type_info),
        value_(value),
        src_(src),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperationReversed");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  Register src_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperationReversed::Generate() {
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, value_, src_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src + value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAdd::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of value + src is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               TypeInfo type_info,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAddReversed::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, value_, dst_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src - value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative subtraction and call the
// appropriate specialized stub for subtract. The result is left in
// dst.
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiSub::Generate() {
  // Undo the optimistic sub operation and call the shared stub.
  __ add(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::SUB,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


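// Worked example for the deferred add/sub classes above: for "x + 3" the
// inline code performs add(dst, Immediate(Smi::FromInt(3))) first and
// checks the flags afterwards.  If x was not a smi or the addition
// overflowed, dst no longer holds x, so the deferred path subtracts the
// tagged constant back out before handing the original operands to the
// generic stub.
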
Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
                                                 Result* operand,
                                                 Handle<Object> value,
                                                 bool reversed,
                                                 OverwriteMode overwrite_mode) {
  // Generate inline code for a binary operation when one of the
  // operands is a constant smi. Consumes the argument "operand".
  if (IsUnsafeSmi(value)) {
    Result unsafe_operand(value);
    if (reversed) {
      return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
                                      overwrite_mode);
    } else {
      return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
                                      overwrite_mode);
    }
  }

  // Get the literal value.
  Smi* smi_value = Smi::cast(*value);
  int int_value = smi_value->value();

  Token::Value op = expr->op();
  Result answer;
  switch (op) {
    case Token::ADD: {
      operand->ToRegister();
      frame_->Spill(operand->reg());

      // Optimistically add. Call the specialized add stub if the
      // result is not a smi or overflows.
      DeferredCode* deferred = NULL;
      if (reversed) {
        deferred = new DeferredInlineSmiAddReversed(operand->reg(),
                                                    operand->type_info(),
                                                    smi_value,
                                                    overwrite_mode);
      } else {
        deferred = new DeferredInlineSmiAdd(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
      }
      __ add(Operand(operand->reg()), Immediate(value));
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred = NULL;
      if (reversed) {
        // The reversed case is only hit when the right operand is not a
        // constant.
        ASSERT(operand->is_register());
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        __ Set(answer.reg(), Immediate(value));
        deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   operand->reg(),
                                                   operand->type_info(),
                                                   overwrite_mode);
        __ sub(answer.reg(), Operand(operand->reg()));
      } else {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        answer = *operand;
        deferred = new DeferredInlineSmiSub(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
        __ sub(Operand(operand->reg()), Immediate(value));
      }
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(answer.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      operand->Unuse();
      break;
    }

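    // Note on the ADD/SUB cases above: Immediate(value) emits the
    // smi-tagged bit pattern of the constant, so the add/sub operates on
    // tagged values directly; afterwards only the overflow flag and (for
    // operands not statically known to be smis) the tag bit of the result
    // need to be checked.
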
    case Token::SAR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        frame_->Spill(operand->reg());
        if (!operand->type_info().IsSmi()) {
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
          deferred->BindExit();
        } else {
          if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
        }
        answer = *operand;
      }
      break;

    case Token::SHR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           answer.reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        if (!operand->type_info().IsSmi()) {
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(operand->reg());
        }
        __ mov(answer.reg(), operand->reg());
        __ SmiUntag(answer.reg());
        __ shr(answer.reg(), shift_value);
        // A smi shifted right by two or more is always in the positive
        // smi range, so the range check is only needed for shifts of 0
        // or 1.
        if (shift_value < 2) {
          __ test(answer.reg(), Immediate(0xc0000000));
          deferred->Branch(not_zero);
        }
        operand->Unuse();
        __ SmiTag(answer.reg());
        deferred->BindExit();
      }
      break;

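    // Worked example for the SAR case above: the shift is applied to the
    // still-tagged value, so bit 0 of the result is a stray payload bit
    // that must be cleared to restore a valid smi tag.  For -5 (tagged
    // 0xfffffff6), sar by 1 gives 0xfffffffb; and_ with ~kSmiTagMask
    // gives 0xfffffffa, which is -3 tagged -- and -5 >> 1 is indeed -3.
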
    case Token::SHL:
      if (reversed) {
        // Move operand into ecx and also into a second register.
        // If operand is already in a register, take advantage of that.
        // This lets us modify ecx, but still bail out to deferred code.
        Result right;
        Result right_copy_in_ecx;
        TypeInfo right_type_info = operand->type_info();
        operand->ToRegister();
        if (operand->reg().is(ecx)) {
          right = allocator()->Allocate();
          __ mov(right.reg(), ecx);
          frame_->Spill(ecx);
          right_copy_in_ecx = *operand;
        } else {
          right_copy_in_ecx = allocator()->Allocate(ecx);
          __ mov(ecx, operand->reg());
          right = *operand;
        }
        operand->Unuse();

        answer = allocator()->Allocate();
        DeferredInlineSmiOperationReversed* deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   right.reg(),
                                                   right_type_info,
                                                   overwrite_mode);
        __ mov(answer.reg(), Immediate(int_value));
        __ sar(ecx, kSmiTagSize);
        if (!right_type_info.IsSmi()) {
          deferred->Branch(carry);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(right.reg());
        }
        __ shl_cl(answer.reg());
        __ cmp(answer.reg(), 0xc0000000);
        deferred->Branch(sign);
        __ SmiTag(answer.reg());

        deferred->BindExit();
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        if (shift_value == 0) {
          // Spill operand so it can be overwritten in the slow case.
          frame_->Spill(operand->reg());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          deferred->BindExit();
          answer = *operand;
        } else {
          // Use a fresh temporary for nonzero shift values.
          answer = allocator()->Allocate();
          ASSERT(answer.is_valid());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             answer.reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          if (!operand->type_info().IsSmi()) {
            __ test(operand->reg(), Immediate(kSmiTagMask));
            deferred->Branch(not_zero);
          } else if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          __ mov(answer.reg(), operand->reg());
          STATIC_ASSERT(kSmiTag == 0);  // adjust code if not the case
          // We do no shifts, only the Smi conversion, if shift_value is 1.
          if (shift_value > 1) {
            __ shl(answer.reg(), shift_value - 1);
          }
          // Convert int result to Smi, checking that it is in int range.
          STATIC_ASSERT(kSmiTagSize == 1);  // adjust code if not the case
          __ add(answer.reg(), Operand(answer.reg()));
          deferred->Branch(overflow);
          deferred->BindExit();
          operand->Unuse();
        }
      }
      break;

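    // Worked example for the non-reversed SHL case above: the operand is
    // kept tagged (tagged v == 2 * v), so shifting by shift_value - 1
    // produces v << shift_value as an untagged integer, and the final add
    // doubles it into the tagged result while setting the overflow flag
    // if the value left the smi range.  E.g. v = 3, shift 2: tagged 0x6,
    // shl by 1 gives 0xc (12 == 3 << 2), add gives 0x18 = 12 tagged.
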
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      operand->ToRegister();
      // DeferredInlineBinaryOperation requires all the registers that it is
      // told about to be spilled.
      frame_->Spill(operand->reg());
      DeferredInlineBinaryOperation* deferred = NULL;
      if (!operand->type_info().IsSmi()) {
        Result left = allocator()->Allocate();
        ASSERT(left.is_valid());
        Result right = allocator()->Allocate();
        ASSERT(right.is_valid());
        deferred = new DeferredInlineBinaryOperation(
            op,
            operand->reg(),
            left.reg(),
            right.reg(),
            operand->type_info(),
            TypeInfo::Smi(),
            overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT);
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->JumpToConstantRhs(not_zero, smi_value);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      if (op == Token::BIT_AND) {
        __ and_(Operand(operand->reg()), Immediate(value));
      } else if (op == Token::BIT_XOR) {
        if (int_value != 0) {
          __ xor_(Operand(operand->reg()), Immediate(value));
        }
      } else {
        ASSERT(op == Token::BIT_OR);
        if (int_value != 0) {
          __ or_(Operand(operand->reg()), Immediate(value));
        }
      }
      if (deferred != NULL) deferred->BindExit();
      answer = *operand;
      break;
    }

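    // Note on the bit-op case above: AND, OR and XOR of two tagged smis
    // yield a correctly tagged smi (the 0 tag bits combine as 0&0, 0|0
    // and 0^0), so the operation is applied to the tagged constant
    // directly and no retagging is needed.  XOR and OR with 0 are
    // identities, which is why they are skipped for a zero constant.
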
    case Token::DIV:
      if (!reversed && int_value == 2) {
        operand->ToRegister();
        frame_->Spill(operand->reg());

        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check that lowest log2(value) bits of operand are zero, and test
        // smi tag at the same time.
        STATIC_ASSERT(kSmiTag == 0);
        STATIC_ASSERT(kSmiTagSize == 1);
        __ test(operand->reg(), Immediate(3));
        deferred->Branch(not_zero);  // Branch if non-smi or odd smi.
        __ sar(operand->reg(), 1);
        deferred->BindExit();
        answer = *operand;
      } else {
        // Cannot fall through MOD to default case, so we duplicate the
        // default case here.
        Result constant_operand(value);
        if (reversed) {
          answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                            overwrite_mode);
        } else {
          answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                            overwrite_mode);
        }
      }
      break;

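    // Worked example for the divide-by-two fast path above: test(reg, 3)
    // checks the smi tag bit and the lowest payload bit in one
    // instruction, so the fast path is only taken for even smis, where
    // the remainder is zero and no -0 can arise.  For 6 (tagged 0xc),
    // sar by 1 gives 0x6, which is 3 tagged; an odd smi such as 7
    // (tagged 0xe) fails the test and goes to the deferred code.
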
    // Generate inline code for mod of powers of 2 and negative powers of 2.
    case Token::MOD:
      if (!reversed &&
          int_value != 0 &&
          (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check for negative or non-Smi left hand side.
        __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
        deferred->Branch(not_zero);
        if (int_value < 0) int_value = -int_value;
        if (int_value == 1) {
          __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
        } else {
          __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
        }
        deferred->BindExit();
        answer = *operand;
        break;
      }
      // Fall through if we did not find a power of 2 on the right hand side!
      // The next case must be the default.

    default: {
      Result constant_operand(value);
      if (reversed) {
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                          overwrite_mode);
      }
      break;
    }
  }
  ASSERT(answer.is_valid());
  return answer;
}


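// Worked example for the MOD power-of-two case above: for a non-negative
// smi the tagged value can be masked directly, since
// tagged(v) & ((m << kSmiTagSize) - 1) == tagged(v % m).  E.g. 13 % 4:
// tagged 13 is 0x1a, the mask is (4 << 1) - 1 = 7, and 0x1a & 7 = 0x2,
// which is 1 tagged -- and 13 % 4 == 1.  Negative operands bail out via
// the kSmiSignMask test because their JS remainder keeps the dividend's
// sign (and can even be -0), which plain masking would get wrong.
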
static bool CouldBeNaN(const Result& result) {
  if (result.type_info().IsSmi()) return false;
  if (result.type_info().IsInteger32()) return false;
  if (!result.is_constant()) return true;
  if (!result.handle()->IsHeapNumber()) return false;
  return isnan(HeapNumber::cast(*result.handle())->value());
}


// Convert from signed to unsigned comparison to match the way EFLAGS are
// set by FPU and XMM compare instructions.
static Condition DoubleCondition(Condition cc) {
  switch (cc) {
    case less: return below;
    case equal: return equal;
    case less_equal: return below_equal;
    case greater: return above;
    case greater_equal: return above_equal;
    default: UNREACHABLE();
  }
  UNREACHABLE();
  return equal;
}


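// Background for DoubleCondition above: ucomisd and the FPU compare
// instructions report their result through CF and ZF the way an unsigned
// integer compare does (CF set when the left operand is the smaller), so
// a signed "less" in the source must be tested as "below" after a double
// comparison.
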
static CompareFlags ComputeCompareFlags(NaNInformation nan_info,
                                        bool inline_number_compare) {
  CompareFlags flags = NO_SMI_COMPARE_IN_STUB;
  if (nan_info == kCantBothBeNaN) {
    flags = static_cast<CompareFlags>(flags | CANT_BOTH_BE_NAN);
  }
  if (inline_number_compare) {
    flags = static_cast<CompareFlags>(flags | NO_NUMBER_COMPARE_IN_STUB);
  }
  return flags;
}


Leon Clarkee46be812010-01-19 14:06:41 +00002661void CodeGenerator::Comparison(AstNode* node,
2662 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00002663 bool strict,
2664 ControlDestination* dest) {
2665 // Strict only makes sense for equality comparisons.
2666 ASSERT(!strict || cc == equal);
2667
2668 Result left_side;
2669 Result right_side;
2670 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
2671 if (cc == greater || cc == less_equal) {
2672 cc = ReverseCondition(cc);
2673 left_side = frame_->Pop();
2674 right_side = frame_->Pop();
2675 } else {
2676 right_side = frame_->Pop();
2677 left_side = frame_->Pop();
2678 }
2679 ASSERT(cc == less || cc == equal || cc == greater_equal);
2680
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002681 // If either side is a constant smi, optimize the comparison.
Leon Clarkee46be812010-01-19 14:06:41 +00002682 bool left_side_constant_smi = false;
2683 bool left_side_constant_null = false;
2684 bool left_side_constant_1_char_string = false;
2685 if (left_side.is_constant()) {
2686 left_side_constant_smi = left_side.handle()->IsSmi();
2687 left_side_constant_null = left_side.handle()->IsNull();
2688 left_side_constant_1_char_string =
2689 (left_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002690 String::cast(*left_side.handle())->length() == 1 &&
2691 String::cast(*left_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002692 }
2693 bool right_side_constant_smi = false;
2694 bool right_side_constant_null = false;
2695 bool right_side_constant_1_char_string = false;
2696 if (right_side.is_constant()) {
2697 right_side_constant_smi = right_side.handle()->IsSmi();
2698 right_side_constant_null = right_side.handle()->IsNull();
2699 right_side_constant_1_char_string =
2700 (right_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002701 String::cast(*right_side.handle())->length() == 1 &&
2702 String::cast(*right_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002703 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002704
2705 if (left_side_constant_smi || right_side_constant_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002706 bool is_loop_condition = (node->AsExpression() != NULL) &&
2707 node->AsExpression()->is_loop_condition();
2708 ConstantSmiComparison(cc, strict, dest, &left_side, &right_side,
2709 left_side_constant_smi, right_side_constant_smi,
2710 is_loop_condition);
Leon Clarkee46be812010-01-19 14:06:41 +00002711 } else if (left_side_constant_1_char_string ||
2712 right_side_constant_1_char_string) {
2713 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
2714 // Trivial case, comparing two constants.
2715 int left_value = String::cast(*left_side.handle())->Get(0);
2716 int right_value = String::cast(*right_side.handle())->Get(0);
2717 switch (cc) {
2718 case less:
2719 dest->Goto(left_value < right_value);
2720 break;
2721 case equal:
2722 dest->Goto(left_value == right_value);
2723 break;
2724 case greater_equal:
2725 dest->Goto(left_value >= right_value);
2726 break;
2727 default:
2728 UNREACHABLE();
2729 }
2730 } else {
2731      // Only one side is a constant 1-character string.
2732 // If left side is a constant 1-character string, reverse the operands.
2733 // Since one side is a constant string, conversion order does not matter.
2734 if (left_side_constant_1_char_string) {
2735 Result temp = left_side;
2736 left_side = right_side;
2737 right_side = temp;
2738 cc = ReverseCondition(cc);
2739 // This may reintroduce greater or less_equal as the value of cc.
2740 // CompareStub and the inline code both support all values of cc.
2741 }
2742 // Implement comparison against a constant string, inlining the case
2743 // where both sides are strings.
2744 left_side.ToRegister();
2745
2746 // Here we split control flow to the stub call and inlined cases
2747 // before finally splitting it to the control destination. We use
2748 // a jump target and branching to duplicate the virtual frame at
2749 // the first split. We manually handle the off-frame references
2750 // by reconstituting them on the non-fall-through path.
2751 JumpTarget is_not_string, is_string;
2752 Register left_reg = left_side.reg();
2753 Handle<Object> right_val = right_side.handle();
Steve Block6ded16b2010-05-10 14:33:55 +01002754 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
Leon Clarkee46be812010-01-19 14:06:41 +00002755 __ test(left_side.reg(), Immediate(kSmiTagMask));
2756 is_not_string.Branch(zero, &left_side);
2757 Result temp = allocator_->Allocate();
2758 ASSERT(temp.is_valid());
2759 __ mov(temp.reg(),
2760 FieldOperand(left_side.reg(), HeapObject::kMapOffset));
2761 __ movzx_b(temp.reg(),
2762 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
2763 // If we are testing for equality then make use of the symbol shortcut.
2764      // Check if the left hand side has the same type as the right hand
2765      // side (which is always a symbol).
2766 if (cc == equal) {
2767 Label not_a_symbol;
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002768 STATIC_ASSERT(kSymbolTag != 0);
Leon Clarkee46be812010-01-19 14:06:41 +00002769 // Ensure that no non-strings have the symbol bit set.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002770 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
Leon Clarkee46be812010-01-19 14:06:41 +00002771 __ test(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
2772 __ j(zero, &not_a_symbol);
2773 // They are symbols, so do identity compare.
2774 __ cmp(left_side.reg(), right_side.handle());
2775 dest->true_target()->Branch(equal);
2776 dest->false_target()->Branch(not_equal);
2777 __ bind(&not_a_symbol);
2778 }
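      // The shortcut above is sound because symbols are interned: two
      // symbols with the same contents are the same object, so a pointer
      // compare decides equality, and two distinct symbols can never be
      // equal as strings.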
Steve Block6ded16b2010-05-10 14:33:55 +01002779 // Call the compare stub if the left side is not a flat ascii string.
Leon Clarkee46be812010-01-19 14:06:41 +00002780 __ and_(temp.reg(),
2781 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2782 __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
2783 temp.Unuse();
2784 is_string.Branch(equal, &left_side);
2785
2786      // Set up and call the compare stub.
2787 is_not_string.Bind(&left_side);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002788 CompareFlags flags =
2789 static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_COMPARE_IN_STUB);
2790 CompareStub stub(cc, strict, flags);
Leon Clarkee46be812010-01-19 14:06:41 +00002791 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2792 result.ToRegister();
2793 __ cmp(result.reg(), 0);
2794 result.Unuse();
2795 dest->true_target()->Branch(cc);
2796 dest->false_target()->Jump();
2797
2798 is_string.Bind(&left_side);
Steve Block6ded16b2010-05-10 14:33:55 +01002799 // left_side is a sequential ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002800 left_side = Result(left_reg);
2801 right_side = Result(right_val);
Leon Clarkee46be812010-01-19 14:06:41 +00002802 // Test string equality and comparison.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002803 Label comparison_done;
Leon Clarkee46be812010-01-19 14:06:41 +00002804 if (cc == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00002805 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002806 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002807 __ j(not_equal, &comparison_done);
2808 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002809 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Leon Clarkee46be812010-01-19 14:06:41 +00002810 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2811 char_value);
Leon Clarkee46be812010-01-19 14:06:41 +00002812 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002813 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
2814 Immediate(Smi::FromInt(1)));
2815 // If the length is 0 then the jump is taken and the flags
2816 // correctly represent being less than the one-character string.
2817 __ j(below, &comparison_done);
Steve Block6ded16b2010-05-10 14:33:55 +01002818 // Compare the first character of the string with the
2819 // constant 1-character string.
Leon Clarkee46be812010-01-19 14:06:41 +00002820 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002821 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002822 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2823 char_value);
2824 __ j(not_equal, &comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002825 // If the first character is the same then the long string sorts after
2826 // the short one.
2827 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002828 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002829 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002830 __ bind(&comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002831 left_side.Unuse();
2832 right_side.Unuse();
2833 dest->Split(cc);
2834 }
2835 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002836 // Neither side is a constant Smi, constant 1-char string or constant null.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002837 // If either side is a non-smi constant, or known to be a heap number,
2838 // skip the smi check.
Steve Blocka7e24c12009-10-30 11:49:00 +00002839 bool known_non_smi =
2840 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
Steve Block6ded16b2010-05-10 14:33:55 +01002841 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
2842 left_side.type_info().IsDouble() ||
2843 right_side.type_info().IsDouble();
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002844
Leon Clarkee46be812010-01-19 14:06:41 +00002845 NaNInformation nan_info =
2846 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
2847 kBothCouldBeNaN :
2848 kCantBothBeNaN;
Steve Block6ded16b2010-05-10 14:33:55 +01002849
2850    // Inline the number comparison, handling any combination of smis and
2851    // heap numbers, if:
2852    //   the code is in a loop,
2853    //   the compare operation is different from equal, and
2854    //   the compare is not a loop condition.
2855    // The reason for excluding equal is that it will most likely be done
2856    // with smis (not heap numbers) and the code for comparing smis is
2857    // inlined separately. The same reason applies to loop conditions, which
2858    // will also most likely be smi comparisons.
2859 bool is_loop_condition = (node->AsExpression() != NULL)
2860 && node->AsExpression()->is_loop_condition();
2861 bool inline_number_compare =
2862 loop_nesting() > 0 && cc != equal && !is_loop_condition;
2863
2864 // Left and right needed in registers for the following code.
Steve Blocka7e24c12009-10-30 11:49:00 +00002865 left_side.ToRegister();
2866 right_side.ToRegister();
2867
2868 if (known_non_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002869 // Inlined equality check:
2870 // If at least one of the objects is not NaN, then if the objects
2871 // are identical, they are equal.
Steve Block6ded16b2010-05-10 14:33:55 +01002872 if (nan_info == kCantBothBeNaN && cc == equal) {
2873 __ cmp(left_side.reg(), Operand(right_side.reg()));
2874 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00002875 }
Steve Block6ded16b2010-05-10 14:33:55 +01002876
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002877 // Inlined number comparison:
Steve Block6ded16b2010-05-10 14:33:55 +01002878 if (inline_number_compare) {
2879 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2880 }
2881
2882 // End of in-line compare, call out to the compare stub. Don't include
2883 // number comparison in the stub if it was inlined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002884 CompareFlags flags = ComputeCompareFlags(nan_info, inline_number_compare);
2885 CompareStub stub(cc, strict, flags);
Steve Block6ded16b2010-05-10 14:33:55 +01002886 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2887 __ test(answer.reg(), Operand(answer.reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002888 answer.Unuse();
2889 dest->Split(cc);
2890 } else {
2891 // Here we split control flow to the stub call and inlined cases
2892 // before finally splitting it to the control destination. We use
2893 // a jump target and branching to duplicate the virtual frame at
2894 // the first split. We manually handle the off-frame references
2895 // by reconstituting them on the non-fall-through path.
2896 JumpTarget is_smi;
2897 Register left_reg = left_side.reg();
2898 Register right_reg = right_side.reg();
2899
Steve Block6ded16b2010-05-10 14:33:55 +01002900 // In-line check for comparing two smis.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002901 JumpIfBothSmiUsingTypeInfo(&left_side, &right_side, &is_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01002902
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002903 if (has_valid_frame()) {
2904 // Inline the equality check if both operands can't be a NaN. If both
2905 // objects are the same they are equal.
2906 if (nan_info == kCantBothBeNaN && cc == equal) {
2907 __ cmp(left_side.reg(), Operand(right_side.reg()));
2908 dest->true_target()->Branch(equal);
2909 }
2910
2911 // Inlined number comparison:
2912 if (inline_number_compare) {
2913 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2914 }
2915
2916 // End of in-line compare, call out to the compare stub. Don't include
2917 // number comparison in the stub if it was inlined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002918 CompareFlags flags =
2919 ComputeCompareFlags(nan_info, inline_number_compare);
2920 CompareStub stub(cc, strict, flags);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002921 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2922 __ test(answer.reg(), Operand(answer.reg()));
2923 answer.Unuse();
2924 if (is_smi.is_linked()) {
2925 dest->true_target()->Branch(cc);
2926 dest->false_target()->Jump();
2927 } else {
2928 dest->Split(cc);
2929 }
2930 }
2931
2932 if (is_smi.is_linked()) {
2933 is_smi.Bind();
2934 left_side = Result(left_reg);
2935 right_side = Result(right_reg);
Steve Block6ded16b2010-05-10 14:33:55 +01002936 __ cmp(left_side.reg(), Operand(right_side.reg()));
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002937 right_side.Unuse();
2938 left_side.Unuse();
2939 dest->Split(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01002940 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002941 }
2942 }
2943}
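// A rough outline of the dispatch performed by Comparison() above:
//   either side a constant smi           -> ConstantSmiComparison()
//   either side a constant 1-char string -> inline byte compare + stub
//   both sides known non-smi             -> optional inline double compare + stub
//   otherwise                            -> inline smi fast path, then the above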
2944
2945
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002946void CodeGenerator::ConstantSmiComparison(Condition cc,
2947 bool strict,
2948 ControlDestination* dest,
2949 Result* left_side,
2950 Result* right_side,
2951 bool left_side_constant_smi,
2952 bool right_side_constant_smi,
2953 bool is_loop_condition) {
2954 if (left_side_constant_smi && right_side_constant_smi) {
2955 // Trivial case, comparing two constants.
2956 int left_value = Smi::cast(*left_side->handle())->value();
2957 int right_value = Smi::cast(*right_side->handle())->value();
2958 switch (cc) {
2959 case less:
2960 dest->Goto(left_value < right_value);
2961 break;
2962 case equal:
2963 dest->Goto(left_value == right_value);
2964 break;
2965 case greater_equal:
2966 dest->Goto(left_value >= right_value);
2967 break;
2968 default:
2969 UNREACHABLE();
2970 }
2971 } else {
2972 // Only one side is a constant Smi.
2973 // If left side is a constant Smi, reverse the operands.
2974 // Since one side is a constant Smi, conversion order does not matter.
2975 if (left_side_constant_smi) {
2976 Result* temp = left_side;
2977 left_side = right_side;
2978 right_side = temp;
2979 cc = ReverseCondition(cc);
2980 // This may re-introduce greater or less_equal as the value of cc.
2981 // CompareStub and the inline code both support all values of cc.
2982 }
2983 // Implement comparison against a constant Smi, inlining the case
2984 // where both sides are Smis.
2985 left_side->ToRegister();
2986 Register left_reg = left_side->reg();
2987 Handle<Object> right_val = right_side->handle();
2988
2989 if (left_side->is_smi()) {
2990 if (FLAG_debug_code) {
2991 __ AbortIfNotSmi(left_reg);
2992 }
2993 // Test smi equality and comparison by signed int comparison.
2994 if (IsUnsafeSmi(right_side->handle())) {
2995 right_side->ToRegister();
2996 __ cmp(left_reg, Operand(right_side->reg()));
2997 } else {
2998 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
2999 }
3000 left_side->Unuse();
3001 right_side->Unuse();
3002 dest->Split(cc);
3003 } else {
3004      // The only remaining case is that the left side may be a non-smi.
3005 JumpTarget is_smi;
3006 if (cc == equal) {
3007 // We can do the equality comparison before the smi check.
3008 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3009 dest->true_target()->Branch(equal);
3010 __ test(left_reg, Immediate(kSmiTagMask));
3011 dest->false_target()->Branch(zero);
3012 } else {
3013 // Do the smi check, then the comparison.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003014 __ test(left_reg, Immediate(kSmiTagMask));
3015 is_smi.Branch(zero, left_side, right_side);
3016 }
3017
3018 // Jump or fall through to here if we are comparing a non-smi to a
3019 // constant smi. If the non-smi is a heap number and this is not
3020 // a loop condition, inline the floating point code.
3021 if (!is_loop_condition && CpuFeatures::IsSupported(SSE2)) {
3022 // Right side is a constant smi and left side has been checked
3023 // not to be a smi.
3024 CpuFeatures::Scope use_sse2(SSE2);
3025 JumpTarget not_number;
3026 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
3027 Immediate(Factory::heap_number_map()));
3028 not_number.Branch(not_equal, left_side);
3029 __ movdbl(xmm1,
3030 FieldOperand(left_reg, HeapNumber::kValueOffset));
3031 int value = Smi::cast(*right_val)->value();
3032 if (value == 0) {
3033 __ xorpd(xmm0, xmm0);
3034 } else {
3035 Result temp = allocator()->Allocate();
3036 __ mov(temp.reg(), Immediate(value));
3037 __ cvtsi2sd(xmm0, Operand(temp.reg()));
3038 temp.Unuse();
3039 }
3040 __ ucomisd(xmm1, xmm0);
3041 // Jump to builtin for NaN.
3042 not_number.Branch(parity_even, left_side);
3043 left_side->Unuse();
3044 dest->true_target()->Branch(DoubleCondition(cc));
3045 dest->false_target()->Jump();
3046 not_number.Bind(left_side);
3047 }
3048
3049      // Set up and call the compare stub.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003050 CompareFlags flags =
3051 static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_CODE_IN_STUB);
3052 CompareStub stub(cc, strict, flags);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003053 Result result = frame_->CallStub(&stub, left_side, right_side);
3054 result.ToRegister();
3055 __ test(result.reg(), Operand(result.reg()));
3056 result.Unuse();
3057 if (cc == equal) {
3058 dest->Split(cc);
3059 } else {
3060 dest->true_target()->Branch(cc);
3061 dest->false_target()->Jump();
3062
3063        // For performance it is important that this case comes at the end.
3064 is_smi.Bind(left_side, right_side);
3065 if (IsUnsafeSmi(right_side->handle())) {
3066 right_side->ToRegister();
3067 __ cmp(left_reg, Operand(right_side->reg()));
3068 } else {
3069 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3070 }
3071 left_side->Unuse();
3072 right_side->Unuse();
3073 dest->Split(cc);
3074 }
3075 }
3076 }
3077}
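// Smi layout assumed throughout (ia32, with kSmiTag == 0, kSmiTagSize == 1):
//   smi:         value << 1, low bit 0
//   heap object: tagged pointer, low bit 1
// Hence 'test reg, kSmiTagMask' sets ZF exactly for smis, and an ordinary
// signed compare of two tagged smis orders them correctly, because tagging
// is just multiplication by two.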
3078
3079
Steve Block6ded16b2010-05-10 14:33:55 +01003080// Check that the comparison operand is a number. Jump to not_numbers jump
3081// target passing the left and right result if the operand is not a number.
3082static void CheckComparisonOperand(MacroAssembler* masm_,
3083 Result* operand,
3084 Result* left_side,
3085 Result* right_side,
3086 JumpTarget* not_numbers) {
3087 // Perform check if operand is not known to be a number.
3088 if (!operand->type_info().IsNumber()) {
3089 Label done;
3090 __ test(operand->reg(), Immediate(kSmiTagMask));
3091 __ j(zero, &done);
3092 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3093 Immediate(Factory::heap_number_map()));
3094 not_numbers->Branch(not_equal, left_side, right_side, not_taken);
3095 __ bind(&done);
3096 }
3097}
3098
3099
3100// Load a comparison operand onto the FPU stack. This assumes that the
3101// operand has already been checked and is a number.
3102static void LoadComparisonOperand(MacroAssembler* masm_,
3103 Result* operand) {
3104 Label done;
3105 if (operand->type_info().IsDouble()) {
3106 // Operand is known to be a heap number, just load it.
3107 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3108 } else if (operand->type_info().IsSmi()) {
3109 // Operand is known to be a smi. Convert it to double and keep the original
3110 // smi.
3111 __ SmiUntag(operand->reg());
3112 __ push(operand->reg());
3113 __ fild_s(Operand(esp, 0));
3114 __ pop(operand->reg());
3115 __ SmiTag(operand->reg());
3116 } else {
3117    // Operand type not known; check for smi, otherwise assume heap number.
3118 Label smi;
3119 __ test(operand->reg(), Immediate(kSmiTagMask));
3120 __ j(zero, &smi);
3121 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3122 __ jmp(&done);
3123 __ bind(&smi);
3124 __ SmiUntag(operand->reg());
3125 __ push(operand->reg());
3126 __ fild_s(Operand(esp, 0));
3127 __ pop(operand->reg());
3128 __ SmiTag(operand->reg());
3129 __ jmp(&done);
3130 }
3131 __ bind(&done);
3132}
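// The untag/push/fild_s/pop/SmiTag dance above is equivalent to this C
// sketch (assuming the 1-bit smi tag):
//   int32_t untagged = tagged >> 1;    // SmiUntag
//   double d = (double)untagged;       // what fild_s computes via a stack slot
// with the register restored to its tagged value afterwards, so the caller
// still sees a valid smi.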
3133
3134
3135// Load a comparison operand into an XMM register. Jump to the not_numbers
3136// jump target, passing the left and right result, if the operand is not a number.
3137static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
3138 Result* operand,
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003139 XMMRegister xmm_reg,
Steve Block6ded16b2010-05-10 14:33:55 +01003140 Result* left_side,
3141 Result* right_side,
3142 JumpTarget* not_numbers) {
3143 Label done;
3144 if (operand->type_info().IsDouble()) {
3145 // Operand is known to be a heap number, just load it.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003146 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003147 } else if (operand->type_info().IsSmi()) {
3148 // Operand is known to be a smi. Convert it to double and keep the original
3149 // smi.
3150 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003151 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003152 __ SmiTag(operand->reg());
3153 } else {
3154 // Operand type not known, check for smi or heap number.
3155 Label smi;
3156 __ test(operand->reg(), Immediate(kSmiTagMask));
3157 __ j(zero, &smi);
3158 if (!operand->type_info().IsNumber()) {
3159 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3160 Immediate(Factory::heap_number_map()));
3161 not_numbers->Branch(not_equal, left_side, right_side, taken);
3162 }
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003163 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003164 __ jmp(&done);
3165
3166 __ bind(&smi);
3167    // Convert the smi to a double and keep the original smi.
3168 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003169 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003170 __ SmiTag(operand->reg());
3171 __ jmp(&done);
3172 }
3173 __ bind(&done);
3174}
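// Unlike the FPU path, this loader performs the heap-number map check itself
// when the operand's type is unknown, so the SSE2 caller does not need a
// separate CheckComparisonOperand pass.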
3175
3176
3177void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
3178 Result* right_side,
3179 Condition cc,
3180 ControlDestination* dest) {
3181 ASSERT(left_side->is_register());
3182 ASSERT(right_side->is_register());
3183
3184 JumpTarget not_numbers;
3185 if (CpuFeatures::IsSupported(SSE2)) {
3186 CpuFeatures::Scope use_sse2(SSE2);
3187
3188 // Load left and right operand into registers xmm0 and xmm1 and compare.
3189 LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
3190 &not_numbers);
3191 LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
3192 &not_numbers);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003193 __ ucomisd(xmm0, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +01003194 } else {
3195 Label check_right, compare;
3196
3197 // Make sure that both comparison operands are numbers.
3198 CheckComparisonOperand(masm_, left_side, left_side, right_side,
3199 &not_numbers);
3200 CheckComparisonOperand(masm_, right_side, left_side, right_side,
3201 &not_numbers);
3202
3203 // Load right and left operand to FPU stack and compare.
3204 LoadComparisonOperand(masm_, right_side);
3205 LoadComparisonOperand(masm_, left_side);
3206 __ FCmp();
3207 }
3208
3209 // Bail out if a NaN is involved.
3210 not_numbers.Branch(parity_even, left_side, right_side, not_taken);
3211
3212 // Split to destination targets based on comparison.
3213 left_side->Unuse();
3214 right_side->Unuse();
3215 dest->true_target()->Branch(DoubleCondition(cc));
3216 dest->false_target()->Jump();
3217
3218 not_numbers.Bind(left_side, right_side);
3219}
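// After ucomisd or FCmp the parity flag is set exactly when the operands
// were unordered, i.e. at least one of them was NaN; the parity_even branch
// above therefore routes every NaN-involving comparison out of the inlined
// code and into the generic compare stub.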
3220
3221
Steve Blocka7e24c12009-10-30 11:49:00 +00003222// Call the function just below TOS on the stack with the given
3223// arguments. The receiver is the TOS.
3224void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00003225 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00003226 int position) {
3227 // Push the arguments ("left-to-right") on the stack.
3228 int arg_count = args->length();
3229 for (int i = 0; i < arg_count; i++) {
3230 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01003231 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00003232 }
3233
3234 // Record the position for debugging purposes.
3235 CodeForSourcePosition(position);
3236
3237 // Use the shared code stub to call the function.
3238 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00003239 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003240 Result answer = frame_->CallStub(&call_function, arg_count + 1);
3241 // Restore context and replace function on the stack with the
3242 // result of the stub invocation.
3243 frame_->RestoreContextRegister();
3244 frame_->SetElementAt(0, &answer);
3245}
3246
3247
Leon Clarked91b9f72010-01-27 17:25:45 +00003248void CodeGenerator::CallApplyLazy(Expression* applicand,
Steve Blocka7e24c12009-10-30 11:49:00 +00003249 Expression* receiver,
3250 VariableProxy* arguments,
3251 int position) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003252 // An optimized implementation of expressions of the form
3253 // x.apply(y, arguments).
3254 // If the arguments object of the scope has not been allocated,
3255 // and x.apply is Function.prototype.apply, this optimization
3256 // just copies y and the arguments of the current function on the
3257 // stack, as receiver and arguments, and calls x.
3258 // In the implementation comments, we call x the applicand
3259 // and y the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00003260 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
3261 ASSERT(arguments->IsArguments());
3262
Leon Clarked91b9f72010-01-27 17:25:45 +00003263 // Load applicand.apply onto the stack. This will usually
Steve Blocka7e24c12009-10-30 11:49:00 +00003264 // give us a megamorphic load site. Not super, but it works.
Leon Clarked91b9f72010-01-27 17:25:45 +00003265 Load(applicand);
Andrei Popescu402d9372010-02-26 13:31:12 +00003266 frame()->Dup();
Leon Clarked91b9f72010-01-27 17:25:45 +00003267 Handle<String> name = Factory::LookupAsciiSymbol("apply");
3268 frame()->Push(name);
3269 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
3270 __ nop();
3271 frame()->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00003272
3273 // Load the receiver and the existing arguments object onto the
3274 // expression stack. Avoid allocating the arguments object here.
3275 Load(receiver);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003276 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00003277
3278 // Emit the source position information after having loaded the
3279 // receiver and the arguments.
3280 CodeForSourcePosition(position);
Leon Clarked91b9f72010-01-27 17:25:45 +00003281 // Contents of frame at this point:
3282 // Frame[0]: arguments object of the current function or the hole.
3283 // Frame[1]: receiver
3284 // Frame[2]: applicand.apply
3285 // Frame[3]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003286
3287 // Check if the arguments object has been lazily allocated
3288 // already. If so, just use that instead of copying the arguments
3289 // from the stack. This also deals with cases where a local variable
3290 // named 'arguments' has been introduced.
3291 frame_->Dup();
3292 Result probe = frame_->Pop();
Leon Clarked91b9f72010-01-27 17:25:45 +00003293 { VirtualFrame::SpilledScope spilled_scope;
3294 Label slow, done;
3295 bool try_lazy = true;
3296 if (probe.is_constant()) {
3297 try_lazy = probe.handle()->IsTheHole();
3298 } else {
3299 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
3300 probe.Unuse();
3301 __ j(not_equal, &slow);
3302 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003303
Leon Clarked91b9f72010-01-27 17:25:45 +00003304 if (try_lazy) {
3305 Label build_args;
3306 // Get rid of the arguments object probe.
3307 frame_->Drop(); // Can be called on a spilled frame.
3308 // Stack now has 3 elements on it.
3309 // Contents of stack at this point:
3310 // esp[0]: receiver
3311 // esp[1]: applicand.apply
3312 // esp[2]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003313
Leon Clarked91b9f72010-01-27 17:25:45 +00003314 // Check that the receiver really is a JavaScript object.
3315 __ mov(eax, Operand(esp, 0));
3316 __ test(eax, Immediate(kSmiTagMask));
3317 __ j(zero, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003318 // We allow all JSObjects including JSFunctions. As long as
3319 // JS_FUNCTION_TYPE is the last instance type and it is right
3320 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
3321 // bound.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003322 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3323 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Leon Clarked91b9f72010-01-27 17:25:45 +00003324 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3325 __ j(below, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003326
Leon Clarked91b9f72010-01-27 17:25:45 +00003327 // Check that applicand.apply is Function.prototype.apply.
3328 __ mov(eax, Operand(esp, kPointerSize));
3329 __ test(eax, Immediate(kSmiTagMask));
3330 __ j(zero, &build_args);
3331 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
3332 __ j(not_equal, &build_args);
Steve Block791712a2010-08-27 10:21:07 +01003333 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
3334 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003335 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Steve Block791712a2010-08-27 10:21:07 +01003336 __ cmp(Operand(ecx), Immediate(apply_code));
Leon Clarked91b9f72010-01-27 17:25:45 +00003337 __ j(not_equal, &build_args);
3338
3339 // Check that applicand is a function.
3340 __ mov(edi, Operand(esp, 2 * kPointerSize));
3341 __ test(edi, Immediate(kSmiTagMask));
3342 __ j(zero, &build_args);
3343 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3344 __ j(not_equal, &build_args);
3345
3346 // Copy the arguments to this function possibly from the
3347 // adaptor frame below it.
3348 Label invoke, adapted;
3349 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3350 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
3351 __ cmp(Operand(ecx),
3352 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3353 __ j(equal, &adapted);
3354
3355 // No arguments adaptor frame. Copy fixed number of arguments.
Andrei Popescu31002712010-02-23 13:46:05 +00003356 __ mov(eax, Immediate(scope()->num_parameters()));
3357 for (int i = 0; i < scope()->num_parameters(); i++) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003358 __ push(frame_->ParameterAt(i));
3359 }
3360 __ jmp(&invoke);
3361
3362      // Arguments adaptor frame present. Copy arguments from there, limiting
3363      // the number copied to avoid stack overflow.
3364 __ bind(&adapted);
3365 static const uint32_t kArgumentsLimit = 1 * KB;
3366 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3367 __ SmiUntag(eax);
3368 __ mov(ecx, Operand(eax));
3369 __ cmp(eax, kArgumentsLimit);
3370 __ j(above, &build_args);
3371
3372 // Loop through the arguments pushing them onto the execution
3373 // stack. We don't inform the virtual frame of the push, so we don't
3374 // have to worry about getting rid of the elements from the virtual
3375 // frame.
3376 Label loop;
3377 // ecx is a small non-negative integer, due to the test above.
3378 __ test(ecx, Operand(ecx));
3379 __ j(zero, &invoke);
3380 __ bind(&loop);
3381 __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
3382 __ dec(ecx);
3383 __ j(not_zero, &loop);
3384
3385 // Invoke the function.
3386 __ bind(&invoke);
3387 ParameterCount actual(eax);
3388 __ InvokeFunction(edi, actual, CALL_FUNCTION);
3389 // Drop applicand.apply and applicand from the stack, and push
3390 // the result of the function call, but leave the spilled frame
3391 // unchanged, with 3 elements, so it is correct when we compile the
3392 // slow-case code.
3393 __ add(Operand(esp), Immediate(2 * kPointerSize));
3394 __ push(eax);
3395 // Stack now has 1 element:
3396 // esp[0]: result
3397 __ jmp(&done);
3398
3399 // Slow-case: Allocate the arguments object since we know it isn't
3400      // there, and fall through to the slow case where we call
3401 // applicand.apply.
3402 __ bind(&build_args);
3403      // Stack now has 3 elements, because we jumped here from code where:
3404 // esp[0]: receiver
3405 // esp[1]: applicand.apply
3406 // esp[2]: applicand.
3407
3408 // StoreArgumentsObject requires a correct frame, and may modify it.
3409 Result arguments_object = StoreArgumentsObject(false);
3410 frame_->SpillAll();
3411 arguments_object.ToRegister();
3412 frame_->EmitPush(arguments_object.reg());
3413 arguments_object.Unuse();
3414 // Stack and frame now have 4 elements.
3415 __ bind(&slow);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003416 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003417
Leon Clarked91b9f72010-01-27 17:25:45 +00003418 // Generic computation of x.apply(y, args) with no special optimization.
3419 // Flip applicand.apply and applicand on the stack, so
3420 // applicand looks like the receiver of the applicand.apply call.
3421 // Then process it as a normal function call.
3422 __ mov(eax, Operand(esp, 3 * kPointerSize));
3423 __ mov(ebx, Operand(esp, 2 * kPointerSize));
3424 __ mov(Operand(esp, 2 * kPointerSize), eax);
3425 __ mov(Operand(esp, 3 * kPointerSize), ebx);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003426
Leon Clarked91b9f72010-01-27 17:25:45 +00003427 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
3428 Result res = frame_->CallStub(&call_function, 3);
3429 // The function and its two arguments have been dropped.
3430 frame_->Drop(1); // Drop the receiver as well.
3431 res.ToRegister();
3432 frame_->EmitPush(res.reg());
3433 // Stack now has 1 element:
3434 // esp[0]: result
3435 if (try_lazy) __ bind(&done);
3436 } // End of spilled scope.
3437 // Restore the context register after a call.
Steve Blocka7e24c12009-10-30 11:49:00 +00003438 frame_->RestoreContextRegister();
3439}
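// A sketch of the JavaScript shape this fast path targets (all names
// illustrative):
//   function outer(a, b) { return f.apply(obj, arguments); }
// As long as 'arguments' has never been materialized in outer, the actual
// parameters are copied straight from the caller's (or adaptor's) frame and
// f is invoked directly, so no arguments object is allocated at all.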
3440
3441
3442class DeferredStackCheck: public DeferredCode {
3443 public:
3444 DeferredStackCheck() {
3445 set_comment("[ DeferredStackCheck");
3446 }
3447
3448 virtual void Generate();
3449};
3450
3451
3452void DeferredStackCheck::Generate() {
3453 StackCheckStub stub;
3454 __ CallStub(&stub);
3455}
3456
3457
3458void CodeGenerator::CheckStack() {
Steve Blockd0582a62009-12-15 09:54:21 +00003459 DeferredStackCheck* deferred = new DeferredStackCheck;
3460 ExternalReference stack_limit =
3461 ExternalReference::address_of_stack_limit();
3462 __ cmp(esp, Operand::StaticVariable(stack_limit));
3463 deferred->Branch(below);
3464 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00003465}
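// The fast path of the stack check is a single compare against the external
// stack limit plus a normally-untaken branch; the StackCheckStub call is
// emitted out of line in the deferred code so hot code stays compact.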
3466
3467
3468void CodeGenerator::VisitAndSpill(Statement* statement) {
3469 ASSERT(in_spilled_code());
3470 set_in_spilled_code(false);
3471 Visit(statement);
3472 if (frame_ != NULL) {
3473 frame_->SpillAll();
3474 }
3475 set_in_spilled_code(true);
3476}
3477
3478
3479void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003480#ifdef DEBUG
3481 int original_height = frame_->height();
3482#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003483 ASSERT(in_spilled_code());
3484 set_in_spilled_code(false);
3485 VisitStatements(statements);
3486 if (frame_ != NULL) {
3487 frame_->SpillAll();
3488 }
3489 set_in_spilled_code(true);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003490
3491 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003492}
3493
3494
3495void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003496#ifdef DEBUG
3497 int original_height = frame_->height();
3498#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003499 ASSERT(!in_spilled_code());
3500 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
3501 Visit(statements->at(i));
3502 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003503 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003504}
3505
3506
3507void CodeGenerator::VisitBlock(Block* node) {
3508 ASSERT(!in_spilled_code());
3509 Comment cmnt(masm_, "[ Block");
3510 CodeForStatementPosition(node);
3511 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3512 VisitStatements(node->statements());
3513 if (node->break_target()->is_linked()) {
3514 node->break_target()->Bind();
3515 }
3516 node->break_target()->Unuse();
3517}
3518
3519
3520void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
3521 // Call the runtime to declare the globals. The inevitable call
3522 // will sync frame elements to memory anyway, so we do it eagerly to
3523 // allow us to push the arguments directly into place.
3524 frame_->SyncRange(0, frame_->element_count() - 1);
3525
Steve Block3ce2e202009-11-05 08:53:23 +00003526 frame_->EmitPush(esi); // The context is the first argument.
Steve Blocka7e24c12009-10-30 11:49:00 +00003527 frame_->EmitPush(Immediate(pairs));
Steve Blocka7e24c12009-10-30 11:49:00 +00003528 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
3529 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
3530 // Return value is ignored.
3531}
3532
3533
3534void CodeGenerator::VisitDeclaration(Declaration* node) {
3535 Comment cmnt(masm_, "[ Declaration");
3536 Variable* var = node->proxy()->var();
3537 ASSERT(var != NULL); // must have been resolved
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003538 Slot* slot = var->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00003539
3540 // If it was not possible to allocate the variable at compile time,
3541 // we need to "declare" it at runtime to make sure it actually
3542 // exists in the local context.
3543 if (slot != NULL && slot->type() == Slot::LOOKUP) {
3544 // Variables with a "LOOKUP" slot were introduced as non-locals
3545 // during variable resolution and must have mode DYNAMIC.
3546 ASSERT(var->is_dynamic());
3547 // For now, just do a runtime call. Sync the virtual frame eagerly
3548 // so we can simply push the arguments into place.
3549 frame_->SyncRange(0, frame_->element_count() - 1);
3550 frame_->EmitPush(esi);
3551 frame_->EmitPush(Immediate(var->name()));
3552 // Declaration nodes are always introduced in one of two modes.
3553 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
3554 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
3555 frame_->EmitPush(Immediate(Smi::FromInt(attr)));
3556 // Push initial value, if any.
3557 // Note: For variables we must not push an initial value (such as
3558 // 'undefined') because we may have a (legal) redeclaration and we
3559 // must not destroy the current value.
3560 if (node->mode() == Variable::CONST) {
3561 frame_->EmitPush(Immediate(Factory::the_hole_value()));
3562 } else if (node->fun() != NULL) {
3563 Load(node->fun());
3564 } else {
3565 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
3566 }
3567 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
3568 // Ignore the return value (declarations are statements).
3569 return;
3570 }
3571
3572 ASSERT(!var->is_global());
3573
3574 // If we have a function or a constant, we need to initialize the variable.
3575 Expression* val = NULL;
3576 if (node->mode() == Variable::CONST) {
3577 val = new Literal(Factory::the_hole_value());
3578 } else {
3579 val = node->fun(); // NULL if we don't have a function
3580 }
3581
3582 if (val != NULL) {
3583 {
3584 // Set the initial value.
3585 Reference target(this, node->proxy());
3586 Load(val);
3587 target.SetValue(NOT_CONST_INIT);
3588 // The reference is removed from the stack (preserving TOS) when
3589 // it goes out of scope.
3590 }
3591 // Get rid of the assigned value (declarations are statements).
3592 frame_->Drop();
3593 }
3594}
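// Note the asymmetry above: constants are initialized with the hole, which
// later loads and stores can recognize as "not yet initialized", while plain
// variables get no initial value at all so that a legal redeclaration cannot
// clobber an existing binding.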
3595
3596
3597void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
3598 ASSERT(!in_spilled_code());
3599 Comment cmnt(masm_, "[ ExpressionStatement");
3600 CodeForStatementPosition(node);
3601 Expression* expression = node->expression();
3602 expression->MarkAsStatement();
3603 Load(expression);
3604 // Remove the lingering expression result from the top of stack.
3605 frame_->Drop();
3606}
3607
3608
3609void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
3610 ASSERT(!in_spilled_code());
3611 Comment cmnt(masm_, "// EmptyStatement");
3612 CodeForStatementPosition(node);
3613 // nothing to do
3614}
3615
3616
3617void CodeGenerator::VisitIfStatement(IfStatement* node) {
3618 ASSERT(!in_spilled_code());
3619 Comment cmnt(masm_, "[ IfStatement");
3620 // Generate different code depending on which parts of the if statement
3621 // are present or not.
3622 bool has_then_stm = node->HasThenStatement();
3623 bool has_else_stm = node->HasElseStatement();
3624
3625 CodeForStatementPosition(node);
3626 JumpTarget exit;
3627 if (has_then_stm && has_else_stm) {
3628 JumpTarget then;
3629 JumpTarget else_;
3630 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003631 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003632
3633 if (dest.false_was_fall_through()) {
3634 // The else target was bound, so we compile the else part first.
3635 Visit(node->else_statement());
3636
3637 // We may have dangling jumps to the then part.
3638 if (then.is_linked()) {
3639 if (has_valid_frame()) exit.Jump();
3640 then.Bind();
3641 Visit(node->then_statement());
3642 }
3643 } else {
3644 // The then target was bound, so we compile the then part first.
3645 Visit(node->then_statement());
3646
3647 if (else_.is_linked()) {
3648 if (has_valid_frame()) exit.Jump();
3649 else_.Bind();
3650 Visit(node->else_statement());
3651 }
3652 }
3653
3654 } else if (has_then_stm) {
3655 ASSERT(!has_else_stm);
3656 JumpTarget then;
3657 ControlDestination dest(&then, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003658 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003659
3660 if (dest.false_was_fall_through()) {
3661 // The exit label was bound. We may have dangling jumps to the
3662 // then part.
3663 if (then.is_linked()) {
3664 exit.Unuse();
3665 exit.Jump();
3666 then.Bind();
3667 Visit(node->then_statement());
3668 }
3669 } else {
3670 // The then label was bound.
3671 Visit(node->then_statement());
3672 }
3673
3674 } else if (has_else_stm) {
3675 ASSERT(!has_then_stm);
3676 JumpTarget else_;
3677 ControlDestination dest(&exit, &else_, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003678 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003679
3680 if (dest.true_was_fall_through()) {
3681 // The exit label was bound. We may have dangling jumps to the
3682 // else part.
3683 if (else_.is_linked()) {
3684 exit.Unuse();
3685 exit.Jump();
3686 else_.Bind();
3687 Visit(node->else_statement());
3688 }
3689 } else {
3690 // The else label was bound.
3691 Visit(node->else_statement());
3692 }
3693
3694 } else {
3695 ASSERT(!has_then_stm && !has_else_stm);
3696 // We only care about the condition's side effects (not its value
3697 // or control flow effect). LoadCondition is called without
3698 // forcing control flow.
3699 ControlDestination dest(&exit, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003700 LoadCondition(node->condition(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003701 if (!dest.is_used()) {
3702 // We got a value on the frame rather than (or in addition to)
3703 // control flow.
3704 frame_->Drop();
3705 }
3706 }
3707
3708 if (exit.is_linked()) {
3709 exit.Bind();
3710 }
3711}
3712
3713
3714void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
3715 ASSERT(!in_spilled_code());
3716 Comment cmnt(masm_, "[ ContinueStatement");
3717 CodeForStatementPosition(node);
3718 node->target()->continue_target()->Jump();
3719}
3720
3721
3722void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
3723 ASSERT(!in_spilled_code());
3724 Comment cmnt(masm_, "[ BreakStatement");
3725 CodeForStatementPosition(node);
3726 node->target()->break_target()->Jump();
3727}
3728
3729
3730void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
3731 ASSERT(!in_spilled_code());
3732 Comment cmnt(masm_, "[ ReturnStatement");
3733
3734 CodeForStatementPosition(node);
3735 Load(node->expression());
3736 Result return_value = frame_->Pop();
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003737 masm()->positions_recorder()->WriteRecordedPositions();
Steve Blocka7e24c12009-10-30 11:49:00 +00003738 if (function_return_is_shadowed_) {
3739 function_return_.Jump(&return_value);
3740 } else {
3741 frame_->PrepareForReturn();
3742 if (function_return_.is_bound()) {
3743 // If the function return label is already bound we reuse the
3744 // code by jumping to the return site.
3745 function_return_.Jump(&return_value);
3746 } else {
3747 function_return_.Bind(&return_value);
3748 GenerateReturnSequence(&return_value);
3749 }
3750 }
3751}
3752
3753
3754void CodeGenerator::GenerateReturnSequence(Result* return_value) {
3755 // The return value is a live (but not currently reference counted)
3756 // reference to eax. This is safe because the current frame does not
3757 // contain a reference to eax (it is prepared for the return by spilling
3758 // all registers).
3759 if (FLAG_trace) {
3760 frame_->Push(return_value);
3761 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
3762 }
3763 return_value->ToRegister(eax);
3764
3765 // Add a label for checking the size of the code used for returning.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003766#ifdef DEBUG
Steve Blocka7e24c12009-10-30 11:49:00 +00003767 Label check_exit_codesize;
3768 masm_->bind(&check_exit_codesize);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003769#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003770
3771 // Leave the frame and return popping the arguments and the
3772 // receiver.
3773 frame_->Exit();
Andrei Popescu31002712010-02-23 13:46:05 +00003774 masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00003775 DeleteFrame();
3776
3777#ifdef ENABLE_DEBUGGER_SUPPORT
3778 // Check that the size of the code used for returning matches what is
3779 // expected by the debugger.
Steve Blockd0582a62009-12-15 09:54:21 +00003780 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
Steve Blocka7e24c12009-10-30 11:49:00 +00003781 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
3782#endif
3783}
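// The byte length of this exit sequence is load-bearing: when debugging, the
// return site is patched with a debug break, so the ASSERT_EQ above pins the
// emitted code to Assembler::kJSReturnSequenceLength, the size the debugger
// expects to overwrite.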
3784
3785
3786void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
3787 ASSERT(!in_spilled_code());
3788 Comment cmnt(masm_, "[ WithEnterStatement");
3789 CodeForStatementPosition(node);
3790 Load(node->expression());
3791 Result context;
3792 if (node->is_catch_block()) {
3793 context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
3794 } else {
3795 context = frame_->CallRuntime(Runtime::kPushContext, 1);
3796 }
3797
3798 // Update context local.
3799 frame_->SaveContextRegister();
3800
3801 // Verify that the runtime call result and esi agree.
3802 if (FLAG_debug_code) {
3803 __ cmp(context.reg(), Operand(esi));
3804 __ Assert(equal, "Runtime::NewContext should end up in esi");
3805 }
3806}
3807
3808
3809void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
3810 ASSERT(!in_spilled_code());
3811 Comment cmnt(masm_, "[ WithExitStatement");
3812 CodeForStatementPosition(node);
3813 // Pop context.
3814 __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
3815 // Update context local.
3816 frame_->SaveContextRegister();
3817}
3818
3819
3820void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
3821 ASSERT(!in_spilled_code());
3822 Comment cmnt(masm_, "[ SwitchStatement");
3823 CodeForStatementPosition(node);
3824 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3825
3826 // Compile the switch value.
3827 Load(node->tag());
3828
3829 ZoneList<CaseClause*>* cases = node->cases();
3830 int length = cases->length();
3831 CaseClause* default_clause = NULL;
3832
3833 JumpTarget next_test;
3834 // Compile the case label expressions and comparisons. Exit early
3835 // if a comparison is unconditionally true. The target next_test is
3836 // bound before the loop in order to indicate control flow to the
3837 // first comparison.
3838 next_test.Bind();
3839 for (int i = 0; i < length && !next_test.is_unused(); i++) {
3840 CaseClause* clause = cases->at(i);
3841 // The default is not a test, but remember it for later.
3842 if (clause->is_default()) {
3843 default_clause = clause;
3844 continue;
3845 }
3846
3847 Comment cmnt(masm_, "[ Case comparison");
3848 // We recycle the same target next_test for each test. Bind it if
3849 // the previous test has not done so and then unuse it for the
3850 // loop.
3851 if (next_test.is_linked()) {
3852 next_test.Bind();
3853 }
3854 next_test.Unuse();
3855
3856 // Duplicate the switch value.
3857 frame_->Dup();
3858
3859 // Compile the label expression.
3860 Load(clause->label());
3861
3862 // Compare and branch to the body if true or the next test if
3863 // false. Prefer the next test as a fall through.
3864 ControlDestination dest(clause->body_target(), &next_test, false);
Leon Clarkee46be812010-01-19 14:06:41 +00003865 Comparison(node, equal, true, &dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00003866
3867 // If the comparison fell through to the true target, jump to the
3868 // actual body.
3869 if (dest.true_was_fall_through()) {
3870 clause->body_target()->Unuse();
3871 clause->body_target()->Jump();
3872 }
3873 }
3874
3875 // If there was control flow to a next test from the last one
3876 // compiled, compile a jump to the default or break target.
3877 if (!next_test.is_unused()) {
3878 if (next_test.is_linked()) {
3879 next_test.Bind();
3880 }
3881 // Drop the switch value.
3882 frame_->Drop();
3883 if (default_clause != NULL) {
3884 default_clause->body_target()->Jump();
3885 } else {
3886 node->break_target()->Jump();
3887 }
3888 }
3889
Steve Blocka7e24c12009-10-30 11:49:00 +00003890 // The last instruction emitted was a jump, either to the default
3891 // clause or the break target, or else to a case body from the loop
3892 // that compiles the tests.
3893 ASSERT(!has_valid_frame());
3894 // Compile case bodies as needed.
3895 for (int i = 0; i < length; i++) {
3896 CaseClause* clause = cases->at(i);
3897
3898 // There are two ways to reach the body: from the corresponding
3899 // test or as the fall through of the previous body.
3900 if (clause->body_target()->is_linked() || has_valid_frame()) {
3901 if (clause->body_target()->is_linked()) {
3902 if (has_valid_frame()) {
3903 // If we have both a jump to the test and a fall through, put
3904 // a jump on the fall through path to avoid the dropping of
3905 // the switch value on the test path. The exception is the
3906 // default which has already had the switch value dropped.
3907 if (clause->is_default()) {
3908 clause->body_target()->Bind();
3909 } else {
3910 JumpTarget body;
3911 body.Jump();
3912 clause->body_target()->Bind();
3913 frame_->Drop();
3914 body.Bind();
3915 }
3916 } else {
3917 // No fall through to worry about.
3918 clause->body_target()->Bind();
3919 if (!clause->is_default()) {
3920 frame_->Drop();
3921 }
3922 }
3923 } else {
3924 // Otherwise, we have only fall through.
3925 ASSERT(has_valid_frame());
3926 }
3927
3928 // We are now prepared to compile the body.
3929 Comment cmnt(masm_, "[ Case body");
3930 VisitStatements(clause->statements());
3931 }
3932 clause->body_target()->Unuse();
3933 }
3934
3935 // We may not have a valid frame here so bind the break target only
3936 // if needed.
3937 if (node->break_target()->is_linked()) {
3938 node->break_target()->Bind();
3939 }
3940 node->break_target()->Unuse();
3941}
3942
3943
Steve Block3ce2e202009-11-05 08:53:23 +00003944void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003945 ASSERT(!in_spilled_code());
Steve Block3ce2e202009-11-05 08:53:23 +00003946 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00003947 CodeForStatementPosition(node);
3948 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
Steve Block3ce2e202009-11-05 08:53:23 +00003949 JumpTarget body(JumpTarget::BIDIRECTIONAL);
3950 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00003951
Steve Block3ce2e202009-11-05 08:53:23 +00003952 ConditionAnalysis info = AnalyzeCondition(node->cond());
3953 // Label the top of the loop for the backward jump if necessary.
3954 switch (info) {
3955 case ALWAYS_TRUE:
3956 // Use the continue target.
3957 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3958 node->continue_target()->Bind();
3959 break;
3960 case ALWAYS_FALSE:
3961 // No need to label it.
3962 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3963 break;
3964 case DONT_KNOW:
3965 // Continue is the test, so use the backward body target.
3966 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3967 body.Bind();
3968 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00003969 }
3970
Steve Block3ce2e202009-11-05 08:53:23 +00003971 CheckStack(); // TODO(1222600): ignore if body contains calls.
3972 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00003973
Steve Block3ce2e202009-11-05 08:53:23 +00003974 // Compile the test.
3975 switch (info) {
3976 case ALWAYS_TRUE:
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003977 // If control flow can fall off the end of the body, jump back
3978 // to the top and bind the break target at the exit.
Steve Block3ce2e202009-11-05 08:53:23 +00003979 if (has_valid_frame()) {
3980 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00003981 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003982 if (node->break_target()->is_linked()) {
3983 node->break_target()->Bind();
3984 }
3985 break;
Steve Block3ce2e202009-11-05 08:53:23 +00003986 case ALWAYS_FALSE:
3987 // We may have had continues or breaks in the body.
3988 if (node->continue_target()->is_linked()) {
3989 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003990 }
Steve Block3ce2e202009-11-05 08:53:23 +00003991 if (node->break_target()->is_linked()) {
3992 node->break_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003993 }
Steve Block3ce2e202009-11-05 08:53:23 +00003994 break;
3995 case DONT_KNOW:
3996 // We have to compile the test expression if it can be reached by
3997 // control flow falling out of the body or via continue.
3998 if (node->continue_target()->is_linked()) {
3999 node->continue_target()->Bind();
4000 }
4001 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00004002 Comment cmnt(masm_, "[ DoWhileCondition");
4003 CodeForDoWhileConditionPosition(node);
Steve Block3ce2e202009-11-05 08:53:23 +00004004 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00004005 LoadCondition(node->cond(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00004006 }
Steve Block3ce2e202009-11-05 08:53:23 +00004007 if (node->break_target()->is_linked()) {
4008 node->break_target()->Bind();
4009 }
4010 break;
4011 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004012
Steve Block3ce2e202009-11-05 08:53:23 +00004013 DecrementLoopNesting();
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004014 node->continue_target()->Unuse();
4015 node->break_target()->Unuse();
Steve Block3ce2e202009-11-05 08:53:23 +00004016}
Steve Blocka7e24c12009-10-30 11:49:00 +00004017
Steve Block3ce2e202009-11-05 08:53:23 +00004018
4019void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
4020 ASSERT(!in_spilled_code());
4021 Comment cmnt(masm_, "[ WhileStatement");
4022 CodeForStatementPosition(node);
4023
4024 // If the condition is always false and has no side effects, we do not
4025 // need to compile anything.
4026 ConditionAnalysis info = AnalyzeCondition(node->cond());
4027 if (info == ALWAYS_FALSE) return;
4028
4029 // Do not duplicate conditions that may have function literal
4030 // subexpressions. This can cause us to compile the function literal
4031 // twice.
4032 bool test_at_bottom = !node->may_have_function_literal();
4033 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
4034 IncrementLoopNesting();
4035 JumpTarget body;
4036 if (test_at_bottom) {
4037 body.set_direction(JumpTarget::BIDIRECTIONAL);
4038 }
4039
4040 // Based on the condition analysis, compile the test as necessary.
4041 switch (info) {
4042 case ALWAYS_TRUE:
4043 // We will not compile the test expression. Label the top of the
4044 // loop with the continue target.
4045 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
4046 node->continue_target()->Bind();
4047 break;
4048 case DONT_KNOW: {
4049 if (test_at_bottom) {
4050 // Continue is the test at the bottom, no need to label the test
4051 // at the top. The body is a backward target.
4052 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4053 } else {
4054 // Label the test at the top as the continue target. The body
4055 // is a forward-only target.
4056 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
4057 node->continue_target()->Bind();
4058 }
4059 // Compile the test with the body as the true target and preferred
4060 // fall-through and with the break target as the false target.
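      // Note (added for clarity): this ControlDestination routes a true
      // condition to the loop body and a false one to the break target,
      // and the final 'true' argument marks the true case as the
      // preferred fall-through, so the common path needs no jump.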
4061 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00004062 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00004063
4064 if (dest.false_was_fall_through()) {
4065 // If we got the break target as fall-through, the test may have
4066 // been unconditionally false (if there are no jumps to the
4067 // body).
4068 if (!body.is_linked()) {
4069 DecrementLoopNesting();
4070 return;
4071 }
4072
4073 // Otherwise, jump around the body on the fall through and then
4074 // bind the body target.
4075 node->break_target()->Unuse();
4076 node->break_target()->Jump();
4077 body.Bind();
4078 }
4079 break;
4080 }
4081 case ALWAYS_FALSE:
4082 UNREACHABLE();
4083 break;
4084 }
4085
4086 CheckStack(); // TODO(1222600): ignore if body contains calls.
4087 Visit(node->body());
4088
4089 // Based on the condition analysis, compile the backward jump as
4090 // necessary.
4091 switch (info) {
4092 case ALWAYS_TRUE:
4093 // The loop body has been labeled with the continue target.
4094 if (has_valid_frame()) {
4095 node->continue_target()->Jump();
4096 }
4097 break;
4098 case DONT_KNOW:
4099 if (test_at_bottom) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004100 // If we have chosen to recompile the test at the bottom,
4101 // then it is the continue target.
Steve Blocka7e24c12009-10-30 11:49:00 +00004102 if (node->continue_target()->is_linked()) {
4103 node->continue_target()->Bind();
4104 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004105 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00004106 // The break target is the fall-through (body is a backward
4107 // jump from here and thus an invalid fall-through).
4108 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00004109 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00004110 }
4111 } else {
4112 // If we have chosen not to recompile the test at the bottom,
4113 // jump back to the one at the top.
4114 if (has_valid_frame()) {
4115 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00004116 }
4117 }
Steve Block3ce2e202009-11-05 08:53:23 +00004118 break;
4119 case ALWAYS_FALSE:
4120 UNREACHABLE();
4121 break;
4122 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004123
Steve Block3ce2e202009-11-05 08:53:23 +00004124 // The break target may be already bound (by the condition), or there
4125 // may not be a valid frame. Bind it only if needed.
4126 if (node->break_target()->is_linked()) {
4127 node->break_target()->Bind();
4128 }
4129 DecrementLoopNesting();
4130}
4131
4132
Steve Block6ded16b2010-05-10 14:33:55 +01004133void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
4134 ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
4135 if (slot->type() == Slot::LOCAL) {
4136 frame_->SetTypeForLocalAt(slot->index(), info);
4137 } else {
4138 frame_->SetTypeForParamAt(slot->index(), info);
4139 }
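  // With --debug-code, verify the recorded type at runtime: materialize
  // the slot's value in a register and abort if it is not actually a smi.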
4140 if (FLAG_debug_code && info.IsSmi()) {
4141 if (slot->type() == Slot::LOCAL) {
4142 frame_->PushLocalAt(slot->index());
4143 } else {
4144 frame_->PushParameterAt(slot->index());
4145 }
4146 Result var = frame_->Pop();
4147 var.ToRegister();
4148 __ AbortIfNotSmi(var.reg());
4149 }
4150}
4151
4152
Steve Block3ce2e202009-11-05 08:53:23 +00004153void CodeGenerator::VisitForStatement(ForStatement* node) {
4154 ASSERT(!in_spilled_code());
4155 Comment cmnt(masm_, "[ ForStatement");
4156 CodeForStatementPosition(node);
4157
4158 // Compile the init expression if present.
4159 if (node->init() != NULL) {
4160 Visit(node->init());
4161 }
4162
4163 // If the condition is always false and has no side effects, we do not
4164 // need to compile anything else.
4165 ConditionAnalysis info = AnalyzeCondition(node->cond());
4166 if (info == ALWAYS_FALSE) return;
4167
4168 // Do not duplicate conditions that may have function literal
4169 // subexpressions. This can cause us to compile the function literal
4170 // twice.
4171 bool test_at_bottom = !node->may_have_function_literal();
4172 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
4173 IncrementLoopNesting();
4174
4175 // Target for backward edge if no test at the bottom, otherwise
4176 // unused.
4177 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
4178
4179 // Target for backward edge if there is a test at the bottom,
4180 // otherwise used as target for test at the top.
4181 JumpTarget body;
4182 if (test_at_bottom) {
4183 body.set_direction(JumpTarget::BIDIRECTIONAL);
4184 }
4185
4186 // Based on the condition analysis, compile the test as necessary.
4187 switch (info) {
4188 case ALWAYS_TRUE:
4189 // We will not compile the test expression. Label the top of the
4190 // loop.
4191 if (node->next() == NULL) {
4192 // Use the continue target if there is no update expression.
4193 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
4194 node->continue_target()->Bind();
4195 } else {
4196 // Otherwise use the backward loop target.
4197 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4198 loop.Bind();
4199 }
4200 break;
4201 case DONT_KNOW: {
4202 if (test_at_bottom) {
4203 // Continue is either the update expression or the test at the
4204 // bottom; no need to label the test at the top.
4205 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4206 } else if (node->next() == NULL) {
4207 // We are not recompiling the test at the bottom and there is no
4208 // update expression.
4209 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
4210 node->continue_target()->Bind();
4211 } else {
4212 // We are not recompiling the test at the bottom and there is an
4213 // update expression.
4214 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4215 loop.Bind();
4216 }
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004217
Steve Block3ce2e202009-11-05 08:53:23 +00004218 // Compile the test with the body as the true target and preferred
4219 // fall-through and with the break target as the false target.
4220 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00004221 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00004222
4223 if (dest.false_was_fall_through()) {
4224 // If we got the break target as fall-through, the test may have
4225 // been unconditionally false (if there are no jumps to the
4226 // body).
4227 if (!body.is_linked()) {
4228 DecrementLoopNesting();
4229 return;
4230 }
4231
4232 // Otherwise, jump around the body on the fall through and then
4233 // bind the body target.
4234 node->break_target()->Unuse();
4235 node->break_target()->Jump();
4236 body.Bind();
4237 }
4238 break;
4239 }
4240 case ALWAYS_FALSE:
4241 UNREACHABLE();
4242 break;
4243 }
4244
4245 CheckStack(); // TODO(1222600): ignore if body contains calls.
Steve Block6ded16b2010-05-10 14:33:55 +01004246
4247 // We know that the loop index is a smi if it is not modified in the
4248 // loop body and it is checked against a constant limit in the loop
4249 // condition. In this case, we reset the static type information of the
4250 // loop index to smi before compiling the body, the update expression, and
4251 // the bottom check of the loop condition.
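  // Illustrative example (not in the original source): a loop such as
  //   for (var i = 0; i < 100; i++) { sum += a[i]; }
  // qualifies: i is modified only by the update expression and compared
  // against a constant limit, so its type can safely be reset to smi.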
4252 if (node->is_fast_smi_loop()) {
4253 // Set number type of the loop variable to smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004254 SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
Steve Block6ded16b2010-05-10 14:33:55 +01004255 }
4256
Steve Block3ce2e202009-11-05 08:53:23 +00004257 Visit(node->body());
4258
4259 // If there is an update expression, compile it if necessary.
4260 if (node->next() != NULL) {
4261 if (node->continue_target()->is_linked()) {
4262 node->continue_target()->Bind();
4263 }
4264
4265 // Control can reach the update by falling out of the body or by a
4266 // continue.
4267 if (has_valid_frame()) {
4268 // Record the source position of the statement, as this code, which
4269 // comes after the code for the body, actually belongs to the loop
4270 // statement and not to the body.
4271 CodeForStatementPosition(node);
4272 Visit(node->next());
4273 }
4274 }
4275
Steve Block6ded16b2010-05-10 14:33:55 +01004276 // Set the type of the loop variable to smi before compiling the test
4277 // expression if we are in a fast smi loop condition.
4278 if (node->is_fast_smi_loop() && has_valid_frame()) {
4279 // Set number type of the loop variable to smi.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004280 SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
Steve Block6ded16b2010-05-10 14:33:55 +01004281 }
4282
Steve Block3ce2e202009-11-05 08:53:23 +00004283 // Based on the condition analysis, compile the backward jump as
4284 // necessary.
4285 switch (info) {
4286 case ALWAYS_TRUE:
4287 if (has_valid_frame()) {
4288 if (node->next() == NULL) {
4289 node->continue_target()->Jump();
4290 } else {
4291 loop.Jump();
4292 }
4293 }
4294 break;
4295 case DONT_KNOW:
4296 if (test_at_bottom) {
4297 if (node->continue_target()->is_linked()) {
4298 // We can have dangling jumps to the continue target if there
4299 // was no update expression.
4300 node->continue_target()->Bind();
4301 }
4302 // Control can reach the test at the bottom by falling out of
4303 // the body, by a continue in the body, or from the update
4304 // expression.
4305 if (has_valid_frame()) {
4306 // The break target is the fall-through (body is a backward
4307 // jump from here).
4308 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00004309 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00004310 }
4311 } else {
4312 // Otherwise, jump back to the test at the top.
Steve Blocka7e24c12009-10-30 11:49:00 +00004313 if (has_valid_frame()) {
4314 if (node->next() == NULL) {
4315 node->continue_target()->Jump();
4316 } else {
4317 loop.Jump();
4318 }
4319 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004320 }
4321 break;
Steve Block3ce2e202009-11-05 08:53:23 +00004322 case ALWAYS_FALSE:
4323 UNREACHABLE();
4324 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00004325 }
4326
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004327 // The break target may be already bound (by the condition), or there
4328 // may not be a valid frame. Bind it only if needed.
Steve Block3ce2e202009-11-05 08:53:23 +00004329 if (node->break_target()->is_linked()) {
4330 node->break_target()->Bind();
4331 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004332 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00004333}
4334
4335
4336void CodeGenerator::VisitForInStatement(ForInStatement* node) {
4337 ASSERT(!in_spilled_code());
4338 VirtualFrame::SpilledScope spilled_scope;
4339 Comment cmnt(masm_, "[ ForInStatement");
4340 CodeForStatementPosition(node);
4341
4342 JumpTarget primitive;
4343 JumpTarget jsobject;
4344 JumpTarget fixed_array;
4345 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
4346 JumpTarget end_del_check;
4347 JumpTarget exit;
4348
4349 // Get the object to enumerate over (converted to JSObject).
4350 LoadAndSpill(node->enumerable());
4351
4352 // Both SpiderMonkey and kjs ignore null and undefined in contrast
4353 // to the specification. 12.6.4 mandates a call to ToObject.
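  // Illustrative example (not in the original source):
  //   for (var p in null) {} and for (var p in undefined) {}
  // therefore run zero iterations here instead of throwing.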
4354 frame_->EmitPop(eax);
4355
4356 // eax: value to be iterated over
4357 __ cmp(eax, Factory::undefined_value());
4358 exit.Branch(equal);
4359 __ cmp(eax, Factory::null_value());
4360 exit.Branch(equal);
4361
4362 // Stack layout in body:
4363 // [iteration counter (smi)] <- slot 0
4364 // [length of array] <- slot 1
4365 // [FixedArray] <- slot 2
4366 // [Map or 0] <- slot 3
4367 // [Object] <- slot 4
4368
4369 // Check if enumerable is already a JSObject
4370 // eax: value to be iterated over
4371 __ test(eax, Immediate(kSmiTagMask));
4372 primitive.Branch(zero);
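  // Note (added for clarity): kSmiTagMask tests the low tag bit; a zero
  // result means eax holds a smi, which cannot be a JSObject, so control
  // takes the primitive path and boxes the value via ToObject below.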
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004373 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00004374 jsobject.Branch(above_equal);
4375
4376 primitive.Bind();
4377 frame_->EmitPush(eax);
4378 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
4379 // The function call returns the value in eax, which is where we want it below.
4380
4381 jsobject.Bind();
4382 // Get the set of properties (as a FixedArray or Map).
4383 // eax: value to be iterated over
Steve Blockd0582a62009-12-15 09:54:21 +00004384 frame_->EmitPush(eax); // Push the object being iterated over.
Steve Blocka7e24c12009-10-30 11:49:00 +00004385
Steve Blockd0582a62009-12-15 09:54:21 +00004386 // Check cache validity in generated code. This is a fast case for
4387 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
4388 // guarantee cache validity, call the runtime system to check cache
4389 // validity or get the property names in a fixed array.
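  // Sketch of the fast check below (a reading of the emitted code, not an
  // original comment): walk the prototype chain from the receiver; every
  // object must have empty elements and instance descriptors carrying an
  // enum cache, and every object except the receiver must have an empty
  // enum cache. Any failed check falls back to the runtime.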
4390 JumpTarget call_runtime;
4391 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
4392 JumpTarget check_prototype;
4393 JumpTarget use_cache;
4394 __ mov(ecx, eax);
4395 loop.Bind();
4396 // Check that there are no elements.
4397 __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
4398 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
4399 call_runtime.Branch(not_equal);
4400 // Check that instance descriptors are not empty so that we can
4401 // check for an enum cache. Leave the map in ebx for the subsequent
4402 // prototype load.
4403 __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
4404 __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
4405 __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
4406 call_runtime.Branch(equal);
4407 // Check that there is an enum cache in the non-empty instance
4408 // descriptors. This is the case if the next enumeration index
4409 // field does not contain a smi.
4410 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
4411 __ test(edx, Immediate(kSmiTagMask));
4412 call_runtime.Branch(zero);
4413 // For all objects but the receiver, check that the cache is empty.
4414 __ cmp(ecx, Operand(eax));
4415 check_prototype.Branch(equal);
4416 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4417 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
4418 call_runtime.Branch(not_equal);
4419 check_prototype.Bind();
4420 // Load the prototype from the map and loop if non-null.
4421 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
4422 __ cmp(Operand(ecx), Immediate(Factory::null_value()));
4423 loop.Branch(not_equal);
4424 // The enum cache is valid. Load the map of the object being
4425 // iterated over and use the cache for the iteration.
4426 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
4427 use_cache.Jump();
4428
4429 call_runtime.Bind();
4430 // Call the runtime to get the property names for the object.
Steve Blocka7e24c12009-10-30 11:49:00 +00004431 frame_->EmitPush(eax); // push the Object (slot 4) for the runtime call
4432 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
4433
Steve Blockd0582a62009-12-15 09:54:21 +00004434 // If we got a map from the runtime call, we can do a fast
4435 // modification check. Otherwise, we got a fixed array, and we have
4436 // to do a slow check.
Steve Blocka7e24c12009-10-30 11:49:00 +00004437 // eax: map or fixed array (result from call to
4438 // Runtime::kGetPropertyNamesFast)
4439 __ mov(edx, Operand(eax));
4440 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
4441 __ cmp(ecx, Factory::meta_map());
4442 fixed_array.Branch(not_equal);
4443
Steve Blockd0582a62009-12-15 09:54:21 +00004444 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00004445 // Get the enum cache.
Steve Blockd0582a62009-12-15 09:54:21 +00004446 // eax: map (either the result from a call to
4447 // Runtime::kGetPropertyNamesFast or has been fetched directly from
4448 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00004449 __ mov(ecx, Operand(eax));
Steve Blockd0582a62009-12-15 09:54:21 +00004450
Steve Blocka7e24c12009-10-30 11:49:00 +00004451 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
4452 // Get the bridge array held in the enumeration index field.
4453 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
4454 // Get the cache from the bridge array.
4455 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4456
4457 frame_->EmitPush(eax); // <- slot 3
4458 frame_->EmitPush(edx); // <- slot 2
4459 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004460 frame_->EmitPush(eax); // <- slot 1
4461 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4462 entry.Jump();
4463
4464 fixed_array.Bind();
4465 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
4466 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3
4467 frame_->EmitPush(eax); // <- slot 2
4468
4469 // Push the length of the array and the initial index onto the stack.
4470 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004471 frame_->EmitPush(eax); // <- slot 1
4472 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4473
4474 // Condition.
4475 entry.Bind();
4476 // Grab the current frame's height for the break and continue
4477 // targets only after all the state is pushed on the frame.
4478 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
4479 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4480
4481 __ mov(eax, frame_->ElementAt(0)); // load the current count
4482 __ cmp(eax, frame_->ElementAt(1)); // compare to the array length
4483 node->break_target()->Branch(above_equal);
4484
4485 // Get the i'th entry of the array.
4486 __ mov(edx, frame_->ElementAt(2));
Kristian Monsen25f61362010-05-21 11:50:48 +01004487 __ mov(ebx, FixedArrayElementOperand(edx, eax));
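  // Note (an assumption, for clarity): eax holds a smi index, i.e. the
  // value already shifted left by one, so FixedArrayElementOperand only
  // needs to scale it by two more to address pointer-sized elements.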
Steve Blocka7e24c12009-10-30 11:49:00 +00004488
4489 // Get the expected map from the stack, or a zero map in the
4490 // permanent slow case.
4491 // eax: current iteration count; ebx: i'th entry of the enum cache
4492 __ mov(edx, frame_->ElementAt(3));
4493 // Check if the expected map still matches that of the enumerable.
4494 // If not, we have to filter the key.
4495 // eax: current iteration count
4496 // ebx: i'th entry of the enum cache
4497 // edx: expected map value
4498 __ mov(ecx, frame_->ElementAt(4));
4499 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
4500 __ cmp(ecx, Operand(edx));
4501 end_del_check.Branch(equal);
4502
4503 // Convert the entry to a string (or null if it isn't a property anymore).
4504 frame_->EmitPush(frame_->ElementAt(4)); // push enumerable
4505 frame_->EmitPush(ebx); // push entry
4506 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
4507 __ mov(ebx, Operand(eax));
4508
4509 // If the property has been removed while iterating, we just skip it.
Iain Merrick75681382010-08-19 15:07:18 +01004510 __ test(ebx, Operand(ebx));
Steve Blocka7e24c12009-10-30 11:49:00 +00004511 node->continue_target()->Branch(equal);
4512
4513 end_del_check.Bind();
4514 // Store the entry in the 'each' expression and take another spin in the
4515 // loop. ebx: i'th entry of the enum cache (or string thereof)
4516 frame_->EmitPush(ebx);
4517 { Reference each(this, node->each());
Steve Blocka7e24c12009-10-30 11:49:00 +00004518 if (!each.is_illegal()) {
4519 if (each.size() > 0) {
Iain Merrick75681382010-08-19 15:07:18 +01004520 // Loading a reference may leave the frame in an unspilled state.
4521 frame_->SpillAll();
4522 // Get the value (under the reference on the stack) from memory.
Steve Blocka7e24c12009-10-30 11:49:00 +00004523 frame_->EmitPush(frame_->ElementAt(each.size()));
Leon Clarked91b9f72010-01-27 17:25:45 +00004524 each.SetValue(NOT_CONST_INIT);
4525 frame_->Drop(2);
4526 } else {
4527 // If the reference was to a slot, we rely on the convenient property
4528 // that it doesn't matter whether a value (e.g., ebx pushed above) is
4529 // right on top of or right underneath a zero-sized reference.
4530 each.SetValue(NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00004531 frame_->Drop();
4532 }
4533 }
4534 }
4535 // Unloading a reference may leave the frame in an unspilled state.
4536 frame_->SpillAll();
4537
Steve Blocka7e24c12009-10-30 11:49:00 +00004538 // Body.
4539 CheckStack(); // TODO(1222600): ignore if body contains calls.
4540 VisitAndSpill(node->body());
4541
4542 // Next. Reestablish a spilled frame in case we are coming here via
4543 // a continue in the body.
4544 node->continue_target()->Bind();
4545 frame_->SpillAll();
4546 frame_->EmitPop(eax);
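  // Note (added for clarity): the counter is smi-tagged, so adding the
  // tagged constant Smi::FromInt(1) increments the untagged value by one
  // without any untag/retag steps.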
4547 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
4548 frame_->EmitPush(eax);
4549 entry.Jump();
4550
4551 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
4552 // any frame.
4553 node->break_target()->Bind();
4554 frame_->Drop(5);
4555
4556 // Exit.
4557 exit.Bind();
4558
4559 node->continue_target()->Unuse();
4560 node->break_target()->Unuse();
4561}
4562
4563
Steve Block3ce2e202009-11-05 08:53:23 +00004564void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004565 ASSERT(!in_spilled_code());
4566 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00004567 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00004568 CodeForStatementPosition(node);
4569
4570 JumpTarget try_block;
4571 JumpTarget exit;
4572
4573 try_block.Call();
4574 // --- Catch block ---
4575 frame_->EmitPush(eax);
4576
4577 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00004578 Variable* catch_var = node->catch_var()->var();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004579 ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
4580 StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00004581
4582 // Remove the exception from the stack.
4583 frame_->Drop();
4584
4585 VisitStatementsAndSpill(node->catch_block()->statements());
4586 if (has_valid_frame()) {
4587 exit.Jump();
4588 }
4589
4590
4591 // --- Try block ---
4592 try_block.Bind();
4593
4594 frame_->PushTryHandler(TRY_CATCH_HANDLER);
4595 int handler_height = frame_->height();
4596
4597 // Shadow the jump targets for all escapes from the try block, including
4598 // returns. During shadowing, the original target is hidden as the
4599 // ShadowTarget and operations on the original actually affect the
4600 // shadowing target.
4601 //
4602 // We should probably try to unify the escaping targets and the return
4603 // target.
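  // Illustrative example (not in the original source): in
  //   try { if (c) return x; } catch (e) { ... }
  // the return must not leave the try handler linked; the shadow target
  // captures that jump so the unlink code emitted below runs first.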
4604 int nof_escapes = node->escaping_targets()->length();
4605 List<ShadowTarget*> shadows(1 + nof_escapes);
4606
4607 // Add the shadow target for the function return.
4608 static const int kReturnShadowIndex = 0;
4609 shadows.Add(new ShadowTarget(&function_return_));
4610 bool function_return_was_shadowed = function_return_is_shadowed_;
4611 function_return_is_shadowed_ = true;
4612 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
4613
4614 // Add the remaining shadow targets.
4615 for (int i = 0; i < nof_escapes; i++) {
4616 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
4617 }
4618
4619 // Generate code for the statements in the try block.
4620 VisitStatementsAndSpill(node->try_block()->statements());
4621
4622 // Stop the introduced shadowing and count the number of required unlinks.
4623 // After shadowing stops, the original targets are unshadowed and the
4624 // ShadowTargets represent the formerly shadowing targets.
4625 bool has_unlinks = false;
4626 for (int i = 0; i < shadows.length(); i++) {
4627 shadows[i]->StopShadowing();
4628 has_unlinks = has_unlinks || shadows[i]->is_linked();
4629 }
4630 function_return_is_shadowed_ = function_return_was_shadowed;
4631
4632 // Get an external reference to the handler address.
4633 ExternalReference handler_address(Top::k_handler_address);
4634
4635 // Make sure that there's nothing left on the stack above the
4636 // handler structure.
4637 if (FLAG_debug_code) {
4638 __ mov(eax, Operand::StaticVariable(handler_address));
4639 __ cmp(esp, Operand(eax));
4640 __ Assert(equal, "stack pointer should point to top handler");
4641 }
4642
4643 // If we can fall off the end of the try block, unlink from try chain.
4644 if (has_valid_frame()) {
4645 // The next handler address is on top of the frame. Unlink from
4646 // the handler list and drop the rest of this handler from the
4647 // frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01004648 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004649 frame_->EmitPop(Operand::StaticVariable(handler_address));
4650 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4651 if (has_unlinks) {
4652 exit.Jump();
4653 }
4654 }
4655
4656 // Generate unlink code for the (formerly) shadowing targets that
4657 // have been jumped to. Deallocate each shadow target.
4658 Result return_value;
4659 for (int i = 0; i < shadows.length(); i++) {
4660 if (shadows[i]->is_linked()) {
4661 // Unlink from try chain; be careful not to destroy the TOS if
4662 // there is one.
4663 if (i == kReturnShadowIndex) {
4664 shadows[i]->Bind(&return_value);
4665 return_value.ToRegister(eax);
4666 } else {
4667 shadows[i]->Bind();
4668 }
4669 // Because we can be jumping here (to spilled code) from
4670 // unspilled code, we need to reestablish a spilled frame at
4671 // this block.
4672 frame_->SpillAll();
4673
4674 // Reload sp from the top handler, because some statements that we
4675 // break from (e.g., for...in) may have left stuff on the stack.
4676 __ mov(esp, Operand::StaticVariable(handler_address));
4677 frame_->Forget(frame_->height() - handler_height);
4678
Kristian Monsen50ef84f2010-07-29 15:18:00 +01004679 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004680 frame_->EmitPop(Operand::StaticVariable(handler_address));
4681 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4682
4683 if (i == kReturnShadowIndex) {
4684 if (!function_return_is_shadowed_) frame_->PrepareForReturn();
4685 shadows[i]->other_target()->Jump(&return_value);
4686 } else {
4687 shadows[i]->other_target()->Jump();
4688 }
4689 }
4690 }
4691
4692 exit.Bind();
4693}
4694
4695
Steve Block3ce2e202009-11-05 08:53:23 +00004696void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004697 ASSERT(!in_spilled_code());
4698 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00004699 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00004700 CodeForStatementPosition(node);
4701
4702 // State: Used to keep track of reason for entering the finally
4703 // block. Should probably be extended to hold information for
4704 // break/continue from within the try block.
4705 enum { FALLING, THROWING, JUMPING };
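  // Note (added for clarity): ecx carries this state as a smi into the
  // finally block: FALLING for normal completion of the try block,
  // THROWING for a thrown exception, and JUMPING + i when the i'th
  // shadowed target (e.g. a return or break) was taken. The dispatch
  // after the finally block uses it to resume the right control flow.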
4706
4707 JumpTarget try_block;
4708 JumpTarget finally_block;
4709
4710 try_block.Call();
4711
4712 frame_->EmitPush(eax);
4713 // In case of thrown exceptions, this is where we continue.
4714 __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
4715 finally_block.Jump();
4716
4717 // --- Try block ---
4718 try_block.Bind();
4719
4720 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
4721 int handler_height = frame_->height();
4722
4723 // Shadow the jump targets for all escapes from the try block, including
4724 // returns. During shadowing, the original target is hidden as the
4725 // ShadowTarget and operations on the original actually affect the
4726 // shadowing target.
4727 //
4728 // We should probably try to unify the escaping targets and the return
4729 // target.
4730 int nof_escapes = node->escaping_targets()->length();
4731 List<ShadowTarget*> shadows(1 + nof_escapes);
4732
4733 // Add the shadow target for the function return.
4734 static const int kReturnShadowIndex = 0;
4735 shadows.Add(new ShadowTarget(&function_return_));
4736 bool function_return_was_shadowed = function_return_is_shadowed_;
4737 function_return_is_shadowed_ = true;
4738 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
4739
4740 // Add the remaining shadow targets.
4741 for (int i = 0; i < nof_escapes; i++) {
4742 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
4743 }
4744
4745 // Generate code for the statements in the try block.
4746 VisitStatementsAndSpill(node->try_block()->statements());
4747
4748 // Stop the introduced shadowing and count the number of required unlinks.
4749 // After shadowing stops, the original targets are unshadowed and the
4750 // ShadowTargets represent the formerly shadowing targets.
4751 int nof_unlinks = 0;
4752 for (int i = 0; i < shadows.length(); i++) {
4753 shadows[i]->StopShadowing();
4754 if (shadows[i]->is_linked()) nof_unlinks++;
4755 }
4756 function_return_is_shadowed_ = function_return_was_shadowed;
4757
4758 // Get an external reference to the handler address.
4759 ExternalReference handler_address(Top::k_handler_address);
4760
4761 // If we can fall off the end of the try block, unlink from the try
4762 // chain and set the state on the frame to FALLING.
4763 if (has_valid_frame()) {
4764 // The next handler address is on top of the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01004765 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004766 frame_->EmitPop(Operand::StaticVariable(handler_address));
4767 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4768
4769 // Fake a top of stack value (unneeded when FALLING) and set the
4770 // state in ecx, then jump around the unlink blocks if any.
4771 frame_->EmitPush(Immediate(Factory::undefined_value()));
4772 __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
4773 if (nof_unlinks > 0) {
4774 finally_block.Jump();
4775 }
4776 }
4777
4778 // Generate code to unlink and set the state for the (formerly)
4779 // shadowing targets that have been jumped to.
4780 for (int i = 0; i < shadows.length(); i++) {
4781 if (shadows[i]->is_linked()) {
4782 // If we have come from the shadowed return, the return value is
4783 // on the virtual frame. We must preserve it until it is
4784 // pushed.
4785 if (i == kReturnShadowIndex) {
4786 Result return_value;
4787 shadows[i]->Bind(&return_value);
4788 return_value.ToRegister(eax);
4789 } else {
4790 shadows[i]->Bind();
4791 }
4792 // Because we can be jumping here (to spilled code) from
4793 // unspilled code, we need to reestablish a spilled frame at
4794 // this block.
4795 frame_->SpillAll();
4796
4797 // Reload sp from the top handler, because some statements that
4798 // we break from (e.g., for...in) may have left stuff on the
4799 // stack.
4800 __ mov(esp, Operand::StaticVariable(handler_address));
4801 frame_->Forget(frame_->height() - handler_height);
4802
4803 // Unlink this handler and drop it from the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01004804 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004805 frame_->EmitPop(Operand::StaticVariable(handler_address));
4806 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4807
4808 if (i == kReturnShadowIndex) {
4809 // If this target shadowed the function return, materialize
4810 // the return value on the stack.
4811 frame_->EmitPush(eax);
4812 } else {
4813 // Fake TOS for targets that shadowed breaks and continues.
4814 frame_->EmitPush(Immediate(Factory::undefined_value()));
4815 }
4816 __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
4817 if (--nof_unlinks > 0) {
4818 // If this is not the last unlink block, jump around the next.
4819 finally_block.Jump();
4820 }
4821 }
4822 }
4823
4824 // --- Finally block ---
4825 finally_block.Bind();
4826
4827 // Push the state on the stack.
4828 frame_->EmitPush(ecx);
4829
4830 // We keep two elements on the stack - the (possibly faked) result
4831 // and the state - while evaluating the finally block.
4832 //
4833 // Generate code for the statements in the finally block.
4834 VisitStatementsAndSpill(node->finally_block()->statements());
4835
4836 if (has_valid_frame()) {
4837 // Restore state and return value or faked TOS.
4838 frame_->EmitPop(ecx);
4839 frame_->EmitPop(eax);
4840 }
4841
4842 // Generate code to jump to the right destination for all used
4843 // formerly shadowing targets. Deallocate each shadow target.
4844 for (int i = 0; i < shadows.length(); i++) {
4845 if (has_valid_frame() && shadows[i]->is_bound()) {
4846 BreakTarget* original = shadows[i]->other_target();
4847 __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
4848 if (i == kReturnShadowIndex) {
4849 // The return value is (already) in eax.
4850 Result return_value = allocator_->Allocate(eax);
4851 ASSERT(return_value.is_valid());
4852 if (function_return_is_shadowed_) {
4853 original->Branch(equal, &return_value);
4854 } else {
4855 // Branch around the preparation for return which may emit
4856 // code.
4857 JumpTarget skip;
4858 skip.Branch(not_equal);
4859 frame_->PrepareForReturn();
4860 original->Jump(&return_value);
4861 skip.Bind();
4862 }
4863 } else {
4864 original->Branch(equal);
4865 }
4866 }
4867 }
4868
4869 if (has_valid_frame()) {
4870 // Check if we need to rethrow the exception.
4871 JumpTarget exit;
4872 __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
4873 exit.Branch(not_equal);
4874
4875 // Rethrow exception.
4876 frame_->EmitPush(eax); // undo pop from above
4877 frame_->CallRuntime(Runtime::kReThrow, 1);
4878
4879 // Done.
4880 exit.Bind();
4881 }
4882}
4883
4884
4885void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
4886 ASSERT(!in_spilled_code());
4887 Comment cmnt(masm_, "[ DebuggerStatement");
4888 CodeForStatementPosition(node);
4889#ifdef ENABLE_DEBUGGER_SUPPORT
4890 // Spill everything, even constants, to the frame.
4891 frame_->SpillAll();
Leon Clarke4515c472010-02-03 11:58:03 +00004892
Andrei Popescu402d9372010-02-26 13:31:12 +00004893 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00004894 // Ignore the return value.
4895#endif
4896}
4897
4898
Steve Block6ded16b2010-05-10 14:33:55 +01004899Result CodeGenerator::InstantiateFunction(
4900 Handle<SharedFunctionInfo> function_info) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004901 // The inevitable call will sync frame elements to memory anyway, so
4902 // we do it eagerly to allow us to push the arguments directly into
4903 // place.
Andrei Popescu402d9372010-02-26 13:31:12 +00004904 frame()->SyncRange(0, frame()->element_count() - 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00004905
Leon Clarkee46be812010-01-19 14:06:41 +00004906 // Use the fast case closure allocation code that allocates in new
4907 // space for nested functions that don't need literals cloning.
Steve Block6ded16b2010-05-10 14:33:55 +01004908 if (scope()->is_function_scope() && function_info->num_literals() == 0) {
Leon Clarkee46be812010-01-19 14:06:41 +00004909 FastNewClosureStub stub;
Steve Block6ded16b2010-05-10 14:33:55 +01004910 frame()->EmitPush(Immediate(function_info));
Andrei Popescu402d9372010-02-26 13:31:12 +00004911 return frame()->CallStub(&stub, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00004912 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01004913 // Call the runtime to instantiate the function based on the
4914 // shared function info.
Andrei Popescu402d9372010-02-26 13:31:12 +00004915 frame()->EmitPush(esi);
Steve Block6ded16b2010-05-10 14:33:55 +01004916 frame()->EmitPush(Immediate(function_info));
Andrei Popescu402d9372010-02-26 13:31:12 +00004917 return frame()->CallRuntime(Runtime::kNewClosure, 2);
Leon Clarkee46be812010-01-19 14:06:41 +00004918 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004919}
4920
4921
4922void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
4923 Comment cmnt(masm_, "[ FunctionLiteral");
Steve Block6ded16b2010-05-10 14:33:55 +01004924 ASSERT(!in_safe_int32_mode());
4925 // Build the function info and instantiate it.
4926 Handle<SharedFunctionInfo> function_info =
Ben Murdochf87a2032010-10-22 12:50:53 +01004927 Compiler::BuildFunctionInfo(node, script());
Steve Blocka7e24c12009-10-30 11:49:00 +00004928 // Check for stack-overflow exception.
Ben Murdochf87a2032010-10-22 12:50:53 +01004929 if (function_info.is_null()) {
4930 SetStackOverflow();
4931 return;
4932 }
Steve Block6ded16b2010-05-10 14:33:55 +01004933 Result result = InstantiateFunction(function_info);
Andrei Popescu402d9372010-02-26 13:31:12 +00004934 frame()->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004935}
4936
4937
Steve Block6ded16b2010-05-10 14:33:55 +01004938void CodeGenerator::VisitSharedFunctionInfoLiteral(
4939 SharedFunctionInfoLiteral* node) {
4940 ASSERT(!in_safe_int32_mode());
4941 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
4942 Result result = InstantiateFunction(node->shared_function_info());
Andrei Popescu402d9372010-02-26 13:31:12 +00004943 frame()->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004944}
4945
4946
4947void CodeGenerator::VisitConditional(Conditional* node) {
4948 Comment cmnt(masm_, "[ Conditional");
Steve Block6ded16b2010-05-10 14:33:55 +01004949 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00004950 JumpTarget then;
4951 JumpTarget else_;
4952 JumpTarget exit;
4953 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00004954 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00004955
4956 if (dest.false_was_fall_through()) {
4957 // The else target was bound, so we compile the else part first.
Steve Blockd0582a62009-12-15 09:54:21 +00004958 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00004959
4960 if (then.is_linked()) {
4961 exit.Jump();
4962 then.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00004963 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00004964 }
4965 } else {
4966 // The then target was bound, so we compile the then part first.
Steve Blockd0582a62009-12-15 09:54:21 +00004967 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00004968
4969 if (else_.is_linked()) {
4970 exit.Jump();
4971 else_.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00004972 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00004973 }
4974 }
4975
4976 exit.Bind();
4977}
4978
4979
Leon Clarkef7060e22010-06-03 12:02:55 +01004980void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004981 if (slot->type() == Slot::LOOKUP) {
4982 ASSERT(slot->var()->is_dynamic());
Steve Blocka7e24c12009-10-30 11:49:00 +00004983 JumpTarget slow;
4984 JumpTarget done;
Leon Clarkef7060e22010-06-03 12:02:55 +01004985 Result value;
Steve Blocka7e24c12009-10-30 11:49:00 +00004986
Kristian Monsen25f61362010-05-21 11:50:48 +01004987 // Generate fast case for loading from slots that correspond to
4988 // local/global variables or arguments unless they are shadowed by
4989 // eval-introduced bindings.
4990 EmitDynamicLoadFromSlotFastCase(slot,
4991 typeof_state,
Leon Clarkef7060e22010-06-03 12:02:55 +01004992 &value,
Kristian Monsen25f61362010-05-21 11:50:48 +01004993 &slow,
4994 &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00004995
4996 slow.Bind();
4997 // A runtime call is inevitable. We eagerly sync frame elements
4998 // to memory so that we can push the arguments directly into place
4999 // on top of the frame.
Andrei Popescu402d9372010-02-26 13:31:12 +00005000 frame()->SyncRange(0, frame()->element_count() - 1);
5001 frame()->EmitPush(esi);
5002 frame()->EmitPush(Immediate(slot->var()->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005003 if (typeof_state == INSIDE_TYPEOF) {
Leon Clarkef7060e22010-06-03 12:02:55 +01005004 value =
Andrei Popescu402d9372010-02-26 13:31:12 +00005005 frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00005006 } else {
Leon Clarkef7060e22010-06-03 12:02:55 +01005007 value = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00005008 }
5009
Leon Clarkef7060e22010-06-03 12:02:55 +01005010 done.Bind(&value);
5011 frame_->Push(&value);
Steve Blocka7e24c12009-10-30 11:49:00 +00005012
5013 } else if (slot->var()->mode() == Variable::CONST) {
5014 // Const slots may contain 'the hole' value (the constant hasn't been
5015 // initialized yet) which needs to be converted into the 'undefined'
5016 // value.
5017 //
5018 // We currently spill the virtual frame because constants use the
5019 // potentially unsafe direct-frame access of SlotOperand.
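    // Illustrative example (not in the original source): inside a
    // function, g(x); const x = 1; reads x before its initializer has
    // run, so the load below finds the hole and rewrites it to undefined.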
5020 VirtualFrame::SpilledScope spilled_scope;
5021 Comment cmnt(masm_, "[ Load const");
Andrei Popescu402d9372010-02-26 13:31:12 +00005022 Label exit;
Steve Blocka7e24c12009-10-30 11:49:00 +00005023 __ mov(ecx, SlotOperand(slot, ecx));
5024 __ cmp(ecx, Factory::the_hole_value());
Andrei Popescu402d9372010-02-26 13:31:12 +00005025 __ j(not_equal, &exit);
Steve Blocka7e24c12009-10-30 11:49:00 +00005026 __ mov(ecx, Factory::undefined_value());
Andrei Popescu402d9372010-02-26 13:31:12 +00005027 __ bind(&exit);
Leon Clarkef7060e22010-06-03 12:02:55 +01005028 frame()->EmitPush(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00005029
5030 } else if (slot->type() == Slot::PARAMETER) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005031 frame()->PushParameterAt(slot->index());
Steve Blocka7e24c12009-10-30 11:49:00 +00005032
5033 } else if (slot->type() == Slot::LOCAL) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005034 frame()->PushLocalAt(slot->index());
Steve Blocka7e24c12009-10-30 11:49:00 +00005035
5036 } else {
5037 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
5038 // here.
5039 //
5040 // The use of SlotOperand below is safe for an unspilled frame
5041 // because it will always be a context slot.
5042 ASSERT(slot->type() == Slot::CONTEXT);
Leon Clarkef7060e22010-06-03 12:02:55 +01005043 Result temp = allocator()->Allocate();
5044 ASSERT(temp.is_valid());
5045 __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
5046 frame()->Push(&temp);
Steve Blocka7e24c12009-10-30 11:49:00 +00005047 }
5048}
5049
5050
Leon Clarkef7060e22010-06-03 12:02:55 +01005051void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
Andrei Popescu402d9372010-02-26 13:31:12 +00005052 TypeofState state) {
Leon Clarkef7060e22010-06-03 12:02:55 +01005053 LoadFromSlot(slot, state);
Steve Blocka7e24c12009-10-30 11:49:00 +00005054
5055 // Bail out quickly if we're not using lazy arguments allocation.
Leon Clarkef7060e22010-06-03 12:02:55 +01005056 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
Steve Blocka7e24c12009-10-30 11:49:00 +00005057
5058 // ... or if the slot isn't a non-parameter arguments slot.
Leon Clarkef7060e22010-06-03 12:02:55 +01005059 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
Steve Blocka7e24c12009-10-30 11:49:00 +00005060
5061 // If the loaded value is a constant, we know if the arguments
5062 // object has been lazily loaded yet.
Leon Clarkef7060e22010-06-03 12:02:55 +01005063 Result result = frame()->Pop();
Andrei Popescu402d9372010-02-26 13:31:12 +00005064 if (result.is_constant()) {
5065 if (result.handle()->IsTheHole()) {
Leon Clarkef7060e22010-06-03 12:02:55 +01005066 result = StoreArgumentsObject(false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005067 }
Leon Clarkef7060e22010-06-03 12:02:55 +01005068 frame()->Push(&result);
5069 return;
Steve Blocka7e24c12009-10-30 11:49:00 +00005070 }
Leon Clarkef7060e22010-06-03 12:02:55 +01005071 ASSERT(result.is_register());
Steve Blocka7e24c12009-10-30 11:49:00 +00005072 // The loaded value is in a register. If it is the sentinel that
5073 // indicates that we haven't loaded the arguments object yet, we
5074 // need to do it now.
5075 JumpTarget exit;
Andrei Popescu402d9372010-02-26 13:31:12 +00005076 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
Leon Clarkef7060e22010-06-03 12:02:55 +01005077 frame()->Push(&result);
5078 exit.Branch(not_equal);
Andrei Popescu402d9372010-02-26 13:31:12 +00005079
Andrei Popescu402d9372010-02-26 13:31:12 +00005080 result = StoreArgumentsObject(false);
Leon Clarkef7060e22010-06-03 12:02:55 +01005081 frame()->SetElementAt(0, &result);
5082 result.Unuse();
5083 exit.Bind();
5084 return;
Steve Blocka7e24c12009-10-30 11:49:00 +00005085}
5086
5087
5088Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
5089 Slot* slot,
5090 TypeofState typeof_state,
5091 JumpTarget* slow) {
Steve Block6ded16b2010-05-10 14:33:55 +01005092 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005093 // Check that no extension objects have been created by calls to
5094 // eval from the current scope to the global scope.
5095 Register context = esi;
5096 Result tmp = allocator_->Allocate();
5097 ASSERT(tmp.is_valid()); // All non-reserved registers were available.
5098
5099 Scope* s = scope();
5100 while (s != NULL) {
5101 if (s->num_heap_slots() > 0) {
5102 if (s->calls_eval()) {
5103 // Check that extension is NULL.
5104 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
5105 Immediate(0));
5106 slow->Branch(not_equal, not_taken);
5107 }
5108 // Load next context in chain.
5109 __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
5110 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5111 context = tmp.reg();
5112 }
5113 // If no outer scope calls eval, we do not need to check more
5114 // context extensions. If we have reached an eval scope, we check
5115 // all extensions from this point.
5116 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
5117 s = s->outer_scope();
5118 }
5119
5120 if (s != NULL && s->is_eval_scope()) {
5121 // Loop up the context chain. There is no frame effect so it is
5122 // safe to use raw labels here.
5123 Label next, fast;
5124 if (!context.is(tmp.reg())) {
5125 __ mov(tmp.reg(), context);
5126 }
5127 __ bind(&next);
5128 // Terminate at global context.
5129 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
5130 Immediate(Factory::global_context_map()));
5131 __ j(equal, &fast);
5132 // Check that extension is NULL.
5133 __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
5134 slow->Branch(not_equal, not_taken);
5135 // Load next context in chain.
5136 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
5137 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
5138 __ jmp(&next);
5139 __ bind(&fast);
5140 }
5141 tmp.Unuse();
5142
5143 // All extension objects were empty and it is safe to use a global
5144 // load IC call.
Andrei Popescu402d9372010-02-26 13:31:12 +00005145 // The register allocator prefers eax if it is free, so the code generator
5146 // will load the global object directly into eax, which is where the LoadIC
5147 // expects it.
5148 frame_->Spill(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00005149 LoadGlobal();
5150 frame_->Push(slot->var()->name());
5151 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
5152 ? RelocInfo::CODE_TARGET
5153 : RelocInfo::CODE_TARGET_CONTEXT;
5154 Result answer = frame_->CallLoadIC(mode);
5155 // A test eax instruction following the call signals that the inobject
5156 // property case was inlined. Ensure that there is not a test eax
5157 // instruction here.
5158 __ nop();
Steve Blocka7e24c12009-10-30 11:49:00 +00005159 return answer;
5160}
5161
5162
Kristian Monsen25f61362010-05-21 11:50:48 +01005163void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
5164 TypeofState typeof_state,
5165 Result* result,
5166 JumpTarget* slow,
5167 JumpTarget* done) {
5168 // Generate fast-case code for variables that might be shadowed by
5169 // eval-introduced variables. Eval is used a lot without
5170 // introducing variables. In those cases, we do not want to
5171 // perform a runtime call for all variables in the scope
5172 // containing the eval.
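  // Illustrative example (not in the original source): in
  //   function f(x) { eval(s); return x; }
  // x is only potentially shadowed. The fast cases below access the slot
  // directly, guarded by checks that no context extension object exists,
  // and fall back to a full runtime lookup otherwise.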
5173 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
5174 *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
5175 done->Jump(result);
5176
5177 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005178 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Kristian Monsen25f61362010-05-21 11:50:48 +01005179 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
5180 if (potential_slot != NULL) {
5181 // Generate fast case for locals that rewrite to slots.
5182 // Allocate a fresh register to use as a temp in
5183 // ContextSlotOperandCheckExtensions and to hold the result
5184 // value.
5185 *result = allocator()->Allocate();
5186 ASSERT(result->is_valid());
5187 __ mov(result->reg(),
5188 ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
5189 if (potential_slot->var()->mode() == Variable::CONST) {
5190 __ cmp(result->reg(), Factory::the_hole_value());
5191 done->Branch(not_equal, result);
5192 __ mov(result->reg(), Factory::undefined_value());
5193 }
5194 done->Jump(result);
5195 } else if (rewrite != NULL) {
5196 // Generate fast case for calls of an argument function.
5197 Property* property = rewrite->AsProperty();
5198 if (property != NULL) {
5199 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
5200 Literal* key_literal = property->key()->AsLiteral();
5201 if (obj_proxy != NULL &&
5202 key_literal != NULL &&
5203 obj_proxy->IsArguments() &&
5204 key_literal->handle()->IsSmi()) {
5205 // Load arguments object if there are no eval-introduced
5206 // variables. Then load the argument from the arguments
5207 // object using keyed load.
5208 Result arguments = allocator()->Allocate();
5209 ASSERT(arguments.is_valid());
5210 __ mov(arguments.reg(),
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005211 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01005212 arguments,
5213 slow));
5214 frame_->Push(&arguments);
5215 frame_->Push(key_literal->handle());
5216 *result = EmitKeyedLoad();
5217 done->Jump(result);
5218 }
5219 }
5220 }
5221 }
5222}
5223
5224
Steve Blocka7e24c12009-10-30 11:49:00 +00005225void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
5226 if (slot->type() == Slot::LOOKUP) {
5227 ASSERT(slot->var()->is_dynamic());
5228
5229 // For now, just do a runtime call. Since the call is inevitable,
5230 // we eagerly sync the virtual frame so we can directly push the
5231 // arguments into place.
5232 frame_->SyncRange(0, frame_->element_count() - 1);
5233
5234 frame_->EmitPush(esi);
5235 frame_->EmitPush(Immediate(slot->var()->name()));
5236
5237 Result value;
5238 if (init_state == CONST_INIT) {
5239 // Same as the case for a normal store, but ignores attribute
5240 // (e.g. READ_ONLY) of context slot so that we can initialize const
5241 // properties (introduced via eval("const foo = (some expr);")). Also,
5242 // uses the current function context instead of the top context.
5243 //
5244 // Note that we must declare the foo upon entry of eval(), via a
5245 // context slot declaration, but we cannot initialize it at the same
5246 // time, because the const declaration may be at the end of the eval
5247 // code (sigh...) and the const variable may have been used before
5248 // (where its value is 'undefined'). Thus, we can only do the
5249 // initialization when we actually encounter the expression and when
5250 // the expression operands are defined and valid, and thus we need the
5251 // split into 2 operations: declaration of the context slot followed
5252 // by initialization.
5253 value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
5254 } else {
5255 value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
5256 }
5257 // Storing a variable must keep the (new) value on the expression
5258 // stack. This is necessary for compiling chained assignment
5259 // expressions.
5260 frame_->Push(&value);
5261
5262 } else {
5263 ASSERT(!slot->var()->is_dynamic());
5264
5265 JumpTarget exit;
5266 if (init_state == CONST_INIT) {
5267 ASSERT(slot->var()->mode() == Variable::CONST);
5268 // Only the first const initialization must be executed (the slot
5269 // still contains 'the hole' value). When the assignment is executed,
5270 // the code is identical to a normal store (see below).
5271 //
5272 // We spill the frame in the code below because the direct-frame
5273 // access of SlotOperand is potentially unsafe with an unspilled
5274 // frame.
5275 VirtualFrame::SpilledScope spilled_scope;
5276 Comment cmnt(masm_, "[ Init const");
5277 __ mov(ecx, SlotOperand(slot, ecx));
5278 __ cmp(ecx, Factory::the_hole_value());
5279 exit.Branch(not_equal);
5280 }
5281
5282 // We must execute the store. Storing a variable must keep the (new)
5283 // value on the stack. This is necessary for compiling assignment
5284 // expressions.
5285 //
5286 // Note: We will reach here even with slot->var()->mode() ==
5287 // Variable::CONST because of const declarations which will initialize
5288 // consts to 'the hole' value and by doing so, end up calling this code.
5289 if (slot->type() == Slot::PARAMETER) {
5290 frame_->StoreToParameterAt(slot->index());
5291 } else if (slot->type() == Slot::LOCAL) {
5292 frame_->StoreToLocalAt(slot->index());
5293 } else {
5294 // The other slot types (LOOKUP and GLOBAL) cannot reach here.
5295 //
5296 // The use of SlotOperand below is safe for an unspilled frame
5297 // because the slot is a context slot.
5298 ASSERT(slot->type() == Slot::CONTEXT);
5299 frame_->Dup();
5300 Result value = frame_->Pop();
5301 value.ToRegister();
5302 Result start = allocator_->Allocate();
5303 ASSERT(start.is_valid());
5304 __ mov(SlotOperand(slot, start.reg()), value.reg());
5305 // RecordWrite may destroy the value registers.
5306 //
5307 // TODO(204): Avoid actually spilling when the value is not
5308 // needed (probably the common case).
5309 frame_->Spill(value.reg());
5310 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
5311 Result temp = allocator_->Allocate();
5312 ASSERT(temp.is_valid());
5313 __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
5314 // The results start, value, and temp are unused by going out of
5315 // scope.
5316 }
5317
5318 exit.Bind();
5319 }
5320}
5321
5322
Steve Block6ded16b2010-05-10 14:33:55 +01005323void CodeGenerator::VisitSlot(Slot* slot) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005324 Comment cmnt(masm_, "[ Slot");
Steve Block6ded16b2010-05-10 14:33:55 +01005325 if (in_safe_int32_mode()) {
5326 if ((slot->type() == Slot::LOCAL && !slot->is_arguments())) {
5327 frame()->UntaggedPushLocalAt(slot->index());
5328 } else if (slot->type() == Slot::PARAMETER) {
5329 frame()->UntaggedPushParameterAt(slot->index());
5330 } else {
5331 UNREACHABLE();
5332 }
5333 } else {
Leon Clarkef7060e22010-06-03 12:02:55 +01005334 LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Block6ded16b2010-05-10 14:33:55 +01005335 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005336}
5337
5338
5339void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
5340 Comment cmnt(masm_, "[ VariableProxy");
5341 Variable* var = node->var();
5342 Expression* expr = var->rewrite();
5343 if (expr != NULL) {
5344 Visit(expr);
5345 } else {
5346 ASSERT(var->is_global());
Steve Block6ded16b2010-05-10 14:33:55 +01005347 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005348 Reference ref(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00005349 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005350 }
5351}
5352
5353
5354void CodeGenerator::VisitLiteral(Literal* node) {
5355 Comment cmnt(masm_, "[ Literal");
Steve Block6ded16b2010-05-10 14:33:55 +01005356 if (in_safe_int32_mode()) {
5357 frame_->PushUntaggedElement(node->handle());
5358 } else {
5359 frame_->Push(node->handle());
5360 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005361}
5362
5363
Steve Blockd0582a62009-12-15 09:54:21 +00005364void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
5365 ASSERT(value->IsSmi());
5366 int bits = reinterpret_cast<int>(*value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005367 __ push(Immediate(bits ^ jit_cookie_));
5368 __ xor_(Operand(esp, 0), Immediate(jit_cookie_));
Steve Blockd0582a62009-12-15 09:54:21 +00005369}
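// Note (added for clarity): "unsafe" smis are immediates wide enough to
// embed attacker-chosen instruction bytes (JIT spraying). Emitting
//   push (bits ^ jit_cookie_); xor [esp], jit_cookie_
// pushes the intended bits while the raw constant never appears in the
// instruction stream, since the two xors with the secret cookie cancel.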
5370
5371
5372void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
5373 ASSERT(value->IsSmi());
5374 int bits = reinterpret_cast<int>(*value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005375 __ mov(Operand(ebp, offset), Immediate(bits ^ jit_cookie_));
5376 __ xor_(Operand(ebp, offset), Immediate(jit_cookie_));
Steve Blockd0582a62009-12-15 09:54:21 +00005377}
5378
5379
5380void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005381 ASSERT(target.is_valid());
5382 ASSERT(value->IsSmi());
5383 int bits = reinterpret_cast<int>(*value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08005384 __ Set(target, Immediate(bits ^ jit_cookie_));
5385 __ xor_(target, jit_cookie_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005386}
5387
5388
5389bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
5390 if (!value->IsSmi()) return false;
5391 int int_value = Smi::cast(*value)->value();
5392 return !is_intn(int_value, kMaxSmiInlinedBits);
5393}
5394
5395
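// Illustrative note (not from the original source): the jit_cookie_ XOR
// dance above is a hardening measure against JIT spraying. An "unsafe"
// smi is one with too many significant bits to embed directly; rather
// than emitting the attacker-chosen constant verbatim in executable
// memory, the generated code embeds it masked and unmasks it at runtime,
// conceptually:
//
//   push(bits ^ cookie);   // only the masked constant appears in code
//   *esp ^= cookie;        // the real value is reconstructed at runtime

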
// Materialize the regexp literal 'node' in the literals array
// 'literals' of the function. Leave the regexp boilerplate in
// 'boilerplate'.
class DeferredRegExpLiteral: public DeferredCode {
 public:
  DeferredRegExpLiteral(Register boilerplate,
                        Register literals,
                        RegExpLiteral* node)
      : boilerplate_(boilerplate), literals_(literals), node_(node) {
    set_comment("[ DeferredRegExpLiteral");
  }

  void Generate();

 private:
  Register boilerplate_;
  Register literals_;
  RegExpLiteral* node_;
};


void DeferredRegExpLiteral::Generate() {
  // Since the entry is undefined we call the runtime system to
  // compute the literal.
  // Literal array (0).
  __ push(literals_);
  // Literal index (1).
  __ push(Immediate(Smi::FromInt(node_->literal_index())));
  // RegExp pattern (2).
  __ push(Immediate(node_->pattern()));
  // RegExp flags (3).
  __ push(Immediate(node_->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
}


class DeferredAllocateInNewSpace: public DeferredCode {
 public:
  DeferredAllocateInNewSpace(int size,
                             Register target,
                             int registers_to_save = 0)
      : size_(size), target_(target), registers_to_save_(registers_to_save) {
    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
    ASSERT_EQ(0, registers_to_save & target.bit());
    set_comment("[ DeferredAllocateInNewSpace");
  }
  void Generate();

 private:
  int size_;
  Register target_;
  int registers_to_save_;
};


void DeferredAllocateInNewSpace::Generate() {
  // Save the live registers named in the registers_to_save_ bit mask.
  for (int i = 0; i < kNumRegs; i++) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ push(save_register);
    }
  }
  __ push(Immediate(Smi::FromInt(size_)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  if (!target_.is(eax)) {
    __ mov(target_, eax);
  }
  // Restore the saved registers in reverse order.
  for (int i = kNumRegs - 1; i >= 0; i--) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ pop(save_register);
    }
  }
}


void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literals array and check the allocated entry. Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the RegExp object. If so,
  // jump to the deferred code passing the literals array.
  DeferredRegExpLiteral* deferred =
      new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
  __ cmp(boilerplate.reg(), Factory::undefined_value());
  deferred->Branch(equal);
  deferred->BindExit();

  // The boilerplate register now contains the RegExp object.

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;

  DeferredAllocateInNewSpace* allocate_fallback =
      new DeferredAllocateInNewSpace(size, literals.reg());
  frame_->Push(&boilerplate);
  frame_->SpillTop();
  __ AllocateInNewSpace(size,
                        literals.reg(),
                        tmp.reg(),
                        no_reg,
                        allocate_fallback->entry_label(),
                        TAG_OBJECT);
  allocate_fallback->BindExit();
  boilerplate = frame_->Pop();

  // Copy from the boilerplate to the clone and return the clone.
  for (int i = 0; i < size; i += kPointerSize) {
    __ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
    __ mov(FieldOperand(literals.reg(), i), tmp.reg());
  }
  frame_->Push(&literals);
}


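// Illustrative note (not from the original source): a regexp literal such
// as
//
//   function f() { return /ab+c/i; }
//
// materializes its boilerplate object lazily via the deferred runtime
// call above, and then every evaluation copies the boilerplate, so each
// call of f() yields a distinct JSRegExp object.

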
void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ObjectLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant properties.
  frame_->Push(node->constant_properties());
  // Should the object literal have fast elements?
  frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->Push(&clone);

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  node->CalculateEmitStore();

  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        Handle<Object> key(property->key()->handle());
        if (key->IsSymbol()) {
          // Duplicate the object as the IC receiver.
          frame_->Dup();
          Load(property->value());
          if (property->emit_store()) {
            Result ignored =
                frame_->CallStoreIC(Handle<String>::cast(key), false);
            // A test eax instruction following the store IC call would
            // indicate the presence of an inlined version of the
            // store. Add a nop to indicate that there is no such
            // inlined version.
            __ nop();
          } else {
            frame_->Drop(2);
          }
          break;
        }
        // Fall through
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        if (property->emit_store()) {
          // Ignore the result.
          Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
        } else {
          frame_->Drop(3);
        }
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}


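// Illustrative note (not from the original source): CalculateEmitStore()
// above means that in a literal with duplicate keys, e.g.
//
//   var o = { x: f(), x: g() };
//
// both value expressions are still evaluated (for their side effects;
// note the Load followed by Drop in the non-emit_store paths), but only
// the last binding of 'x' actually emits a store.

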
void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ArrayLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  frame_->Push(&literals);
  frame_->Push(Smi::FromInt(node->literal_index()));
  frame_->Push(node->constant_elements());
  int length = node->values()->length();
  Result clone;
  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
  } else if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
  }
  frame_->Push(&clone);

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < length; i++) {
    Expression* value = node->values()->at(i);

    if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) {
      continue;
    }

    // The property must be set by generated code.
    Load(value);

    // Get the property value off the stack.
    Result prop_value = frame_->Pop();
    prop_value.ToRegister();

    // Fetch the array literal while leaving a copy on the stack and
    // use it to get the elements array.
    frame_->Dup();
    Result elements = frame_->Pop();
    elements.ToRegister();
    frame_->Spill(elements.reg());
    // Get the elements array.
    __ mov(elements.reg(),
           FieldOperand(elements.reg(), JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());

    // Update the write barrier for the array address.
    frame_->Spill(prop_value.reg());  // Overwritten by the write barrier.
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_valid());
    __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
  }
}


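// Illustrative note (not from the original source): an array literal
// whose constant elements already carry the copy-on-write map, e.g.
//
//   var a = [1, 2, 3];
//
// is cloned by FastCloneShallowArrayStub in COPY_ON_WRITE_ELEMENTS mode,
// sharing the element backing store until someone writes to it. Literals
// with computed elements, e.g. [x, y], still need the explicit element
// stores (and write barriers) emitted in the loop above.

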
void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
  ASSERT(!in_safe_int32_mode());
  ASSERT(!in_spilled_code());
  // Call runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  Result result =
      frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->Push(&result);
}


void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->AsSlot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    // Construct the implicit binary operation.
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT(frame()->height() == original_height + 1);
}


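// Illustrative note (not from the original source): for a compound slot
// assignment such as
//
//   x += y;   // x is a parameter, local, or context slot
//
// the code above loads the current value of x, loads y, combines them via
// GenericBinaryOperation, and stores the result back with StoreToSlot,
// leaving the result on top of the frame as the value of the assignment
// expression.

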
void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary. If
  // the receiver is trivial it is not placed on the stack at this point, but
  // loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so it will never be an assignment to a variable and
    // there must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      frame()->Dup();
    }
    Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame()->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else if (var != NULL) {
      // The LoadIC stub expects the object in eax.
      // Freeing eax causes the code generator to load the global into it.
      frame_->Spill(eax);
      LoadGlobal();
    } else {
      frame()->Dup();
    }
    Result value = EmitNamedLoad(name, var != NULL);
    frame()->Push(&value);
    Load(node->value());

    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    // Construct the implicit binary operation.
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    Result value = frame()->Pop();
    frame()->Push(prop->obj());
    frame()->Push(&value);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  Result answer = EmitNamedStore(name, is_contextual);
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Result result = frame()->Pop();
      Result receiver = frame()->Pop();
      frame()->Push(&result);
      frame()->Push(&receiver);
    }
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT_EQ(frame()->height(), original_height + 1);
}


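// Illustrative note (not from the original source): the initialization
// block machinery above targets constructor-style sequences such as
//
//   this.a = 1;
//   this.b = 2;
//   this.c = 3;
//
// The receiver is switched to slow (dictionary) properties before the
// first store and back to fast properties after the last one, avoiding
// the quadratic cost of growing a fast-properties object one field at a
// time.

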
void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame()->PushElementAt(1);
    frame()->PushElementAt(1);
    Result value = EmitKeyedLoad();
    frame()->Push(&value);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  Result answer = EmitKeyedStore(prop->key()->type());
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Result result = frame()->Pop();
    Result receiver = frame()->Pop();
    frame()->Push(&result);
    frame()->Push(&receiver);
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT(frame()->height() == original_height + 1);
}


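// Illustrative note (not from the original source): a compound keyed
// assignment such as
//
//   a[i] += v;
//
// duplicates the receiver and key on the frame (the two PushElementAt(1)
// calls above) so that EmitKeyedLoad can consume one copy while the
// original pair stays in place for the subsequent EmitKeyedStore.

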
void CodeGenerator::VisitAssignment(Assignment* node) {
  ASSERT(!in_safe_int32_mode());
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame()->Push(&result);
  }

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitThrow(Throw* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Throw");
  Load(node->exception());
  Result result = frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->Push(&result);
}


void CodeGenerator::VisitProperty(Property* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Property");
  Reference property(this, node);
  property.GetValue();
}


void CodeGenerator::VisitCall(Call* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.

    // Prepare the stack for the call to the resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->Push(Factory::undefined_value());

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Result to hold the result of the function resolution and the
    // final result of the eval call.
    Result result;

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code. If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
                                                     NOT_INSIDE_TYPEOF,
                                                     &slow);
      frame_->Push(&fun);
      if (arg_count > 0) {
        frame_->PushElementAt(arg_count);
      } else {
        frame_->Push(Factory::undefined_value());
      }
      frame_->PushParameterAt(-1);

      // Resolve the call.
      result =
          frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);

      done.Jump(&result);
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    frame_->PushElementAt(arg_count + 1);
    if (arg_count > 0) {
      frame_->PushElementAt(arg_count);
    } else {
      frame_->Push(Factory::undefined_value());
    }
    frame_->PushParameterAt(-1);

    // Resolve the call.
    result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind(&result);

    // The runtime call returns a pair of values in eax (function) and
    // edx (receiver). Touch up the stack with the right values.
    Result receiver = allocator_->Allocate(edx);
    frame_->SetElementAt(arg_count + 1, &result);
    frame_->SetElementAt(arg_count, &receiver);
    receiver.Unuse();

    // Call the function.
    CodeForSourcePosition(node->position());
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    result = frame_->CallStub(&call_function, arg_count + 1);

    // Restore the context and overwrite the function on the stack with
    // the result.
    frame_->RestoreContextRegister();
    frame_->SetElementAt(0, &result);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------

    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Push the name of the function onto the frame.
    frame_->Push(var->name());

    // Call the IC initialization code.
    CodeForSourcePosition(node->position());
    Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
                                       arg_count,
                                       loop_nesting());
    frame_->RestoreContextRegister();
    frame_->Push(&result);

  } else if (var != NULL && var->AsSlot() != NULL &&
             var->AsSlot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;
    Result function;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
                                    NOT_INSIDE_TYPEOF,
                                    &function,
                                    &slow,
                                    &done);

    slow.Bind();
    // Enter the runtime system to load the function from the context.
    // Sync the frame so we can push the arguments directly into
    // place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // The runtime call returns a pair of values in eax and edx. The
    // looked-up function is in eax and the receiver is in edx. These
    // register references are not ref counted here. We spill them
    // eagerly since they are arguments to an inevitable call (and are
    // not sharable by the arguments).
    ASSERT(!allocator()->is_used(eax));
    frame_->EmitPush(eax);

    // Load the receiver.
    ASSERT(!allocator()->is_used(edx));
    frame_->EmitPush(edx);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind(&function);
      frame_->Push(&function);
      LoadGlobalReceiver();
      call.Bind();
    }

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Push the name of the function onto the frame.
        frame_->Push(name);

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
                               loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the function to call from the property through a reference.

      // Pass receiver to called function.
      if (property->is_synthetic()) {
        Reference ref(this, property);
        ref.GetValue();
        // Use global object as receiver.
        LoadGlobalReceiver();
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Load the name of the function.
        Load(property->key());

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET,
                                    arg_count,
                                    loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver();

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
  }
}


void CodeGenerator::VisitCallNew(CallNew* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  Load(node->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Result result = frame_->CallConstructor(arg_count);
  frame_->Push(&result);
}


void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  value.Unuse();
  destination()->Split(zero);
}


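// Illustrative note (not from the original source): on ia32 a smi is a
// 31-bit integer stored shifted left by one, with tag bit 0. The value 5
// is therefore represented as the word 10 (binary 1010), so
//
//   test reg, kSmiTagMask   ; kSmiTagMask == 1
//
// sets the zero flag exactly when reg holds a smi, which is what the
// Split(zero) above branches on.

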
void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  frame_->Push(Factory::undefined_value());
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
  value.Unuse();
  destination()->Split(zero);
}


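// Illustrative note (not from the original source): compared with
// GenerateIsSmi, the mask here also includes kSmiSignMask (the high bit
// of the word), so the single test instruction checks both "is a smi"
// and "is non-negative" at once: the zero flag is set only when the tag
// bit and the sign bit are both clear.

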
class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ Set(result_, Immediate(Factory::undefined_value()));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ Set(result_, Immediate(Factory::nan_value()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};


// This generates code that performs a String.prototype.charCodeAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need two extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch = allocator()->Allocate();
  ASSERT(scratch.is_valid());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object.reg(),
                                   index.reg(),
                                   scratch.reg(),
                                   result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


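// Illustrative note (not from the original source): this inline runtime
// function backs expressions like
//
//   s.charCodeAt(i)
//
// The fast path handles the common string/index cases directly; the
// deferred slow path either returns undefined (so the surrounding
// builtin performs conversion, as the labels above indicate) or returns
// NaN when the index is out of range.

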
class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Result code = frame_->Pop();
  code.ToRegister();
  ASSERT(code.is_valid());

  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code.reg(), result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


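// Illustrative note (not from the original source): this is the inverse
// operation, backing expressions like
//
//   String.fromCharCode(65)  // "A"
//
// The fast path typically serves small character codes from a cache of
// single-character strings; anything else falls through to the deferred
// slow path.

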
class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ Set(result_, Immediate(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ Set(result_, Immediate(Factory::empty_string()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need three extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch1 = allocator()->Allocate();
  ASSERT(scratch1.is_valid());
  Result scratch2 = allocator()->Allocate();
  ASSERT(scratch2.is_valid());

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object.reg(),
                               index.reg(),
                               scratch1.reg(),
                               scratch2.reg(),
                               result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


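// Illustrative note (not from the original source): charAt is essentially
// charCodeAt composed with fromCharCode, e.g.
//
//   s.charAt(i)  // == String.fromCharCode(s.charCodeAt(i)) when in range
//
// which is why this generator needs one more scratch register than
// GenerateStringCharCodeAt, and why its out-of-range result is the empty
// string rather than NaN.

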
void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a JS array or not.
  __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a regexp.
  __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();

  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  __ cmp(obj.reg(), Factory::null_value());
  destination()->true_target()->Branch(equal);

  Result map = allocator()->Allocate();
  ASSERT(map.is_valid());
  __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  destination()->false_target()->Branch(not_zero);
  // Do a range test for JSObject type. We can't use
  // MacroAssembler::IsInstanceJSObjectType, because we are using a
  // ControlDestination, so we copy its implementation here.
  __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
  __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
  __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  obj.Unuse();
  map.Unuse();
  destination()->Split(below_equal);
}


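// Illustrative note (not from the original source): the sub/cmp pair at
// the end of GenerateIsObject is the classic unsigned range-check trick.
// After subtracting FIRST_JS_OBJECT_TYPE, any instance type below the
// range wraps around to a large unsigned value, so a single
//
//   cmp map, LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE
//
// followed by a below_equal branch tests
// FIRST_JS_OBJECT_TYPE <= type <= LAST_JS_OBJECT_TYPE with one branch.

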
Iain Merrick75681382010-08-19 15:07:18 +01006685void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01006686 // This generates a fast version of:
6687 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
6688 // typeof(arg) == function).
6689 // It includes undetectable objects (as opposed to IsObject).
6690 ASSERT(args->length() == 1);
6691 Load(args->at(0));
6692 Result value = frame_->Pop();
6693 value.ToRegister();
6694 ASSERT(value.is_valid());
6695 __ test(value.reg(), Immediate(kSmiTagMask));
6696 destination()->false_target()->Branch(equal);
6697
6698 // Check that this is an object.
6699 frame_->Spill(value.reg());
6700 __ CmpObjectType(value.reg(), FIRST_JS_OBJECT_TYPE, value.reg());
6701 value.Unuse();
6702 destination()->Split(above_equal);
6703}
6704
6705
Iain Merrick75681382010-08-19 15:07:18 +01006706// Deferred code to check whether the String JavaScript object is safe for using
6707// default value of. This code is called after the bit caching this information
6708// in the map has been checked with the map for the object in the map_result_
6709// register. On return the register map_result_ contains 1 for true and 0 for
6710// false.
class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
 public:
  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
                                               Register map_result,
                                               Register scratch1,
                                               Register scratch2)
      : object_(object),
        map_result_(map_result),
        scratch1_(scratch1),
        scratch2_(scratch2) { }

  virtual void Generate() {
    Label false_result;

    // Check that map is loaded as expected.
    if (FLAG_debug_code) {
      __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
      __ Assert(equal, "Map not in expected register");
    }

    // Check for fast case object. Generate false result for slow case object.
    __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
    __ cmp(scratch1_, Factory::hash_table_map());
    __ j(equal, &false_result);

    // Look for the valueOf symbol in the descriptor array, and indicate
    // false if it is found.  The type is not checked, so if it is a
    // transition it is a false negative.
    __ mov(map_result_,
           FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
    __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
    // map_result_: descriptor array
    // scratch1_: length of descriptor array
    // Calculate the end of the descriptor array.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    STATIC_ASSERT(kPointerSize == 4);
    __ lea(scratch1_,
           Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
    // Calculate location of the first key name.
    __ add(Operand(map_result_),
           Immediate(FixedArray::kHeaderSize +
                     DescriptorArray::kFirstIndex * kPointerSize));
    // Loop through all the keys in the descriptor array.  If one of these
    // is the symbol valueOf, the result is false.
    Label entry, loop;
    __ jmp(&entry);
    __ bind(&loop);
    __ mov(scratch2_, FieldOperand(map_result_, 0));
    __ cmp(scratch2_, Factory::value_of_symbol());
    __ j(equal, &false_result);
    __ add(Operand(map_result_), Immediate(kPointerSize));
    __ bind(&entry);
    __ cmp(map_result_, Operand(scratch1_));
    __ j(not_equal, &loop);

    // Reload the map, as register map_result_ was used as a temporary above.
    __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));

    // If a valueOf property is not found on the object, check that its
    // prototype is the unmodified String prototype.  If not, the result
    // is false.
    __ mov(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
    __ test(scratch1_, Immediate(kSmiTagMask));
    __ j(zero, &false_result);
    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
    __ mov(scratch2_, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    __ mov(scratch2_,
           FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
    __ cmp(scratch1_,
           CodeGenerator::ContextOperand(
               scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
    __ j(not_equal, &false_result);
    // Set the bit in the map to indicate that it has been checked safe for
    // default valueOf, and set the true result.
    __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
    __ Set(map_result_, Immediate(1));
    __ jmp(exit_label());
    __ bind(&false_result);
    // Set false result.
    __ Set(map_result_, Immediate(0));
  }

 private:
  Register object_;
  Register map_result_;
  Register scratch1_;
  Register scratch2_;
};


void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();  // Pop the string wrapper.
  obj.ToRegister();
  ASSERT(obj.is_valid());
  if (FLAG_debug_code) {
    __ AbortIfSmi(obj.reg());
  }

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  Result map_result = allocator()->Allocate();
  ASSERT(map_result.is_valid());
  __ mov(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ test_b(FieldOperand(map_result.reg(), Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  destination()->true_target()->Branch(not_zero);

  // We need an additional two scratch registers for the deferred code.
  Result temp1 = allocator()->Allocate();
  ASSERT(temp1.is_valid());
  Result temp2 = allocator()->Allocate();
  ASSERT(temp2.is_valid());

  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
      new DeferredIsStringWrapperSafeForDefaultValueOf(
          obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
  deferred->Branch(zero);
  deferred->BindExit();
  __ test(map_result.reg(), Operand(map_result.reg()));
  obj.Unuse();
  map_result.Unuse();
  temp1.Unuse();
  temp2.Unuse();
  destination()->Split(not_equal);
}


void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (%_ClassOf(arg) === 'Function')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
  obj.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ mov(temp.reg(),
         FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  obj.Unuse();
  temp.Unuse();
  destination()->Split(not_zero);
}


void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

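  // How the check works (my summary of the code below): the calling frame
  // is reached through the saved frame pointer at a fixed offset from ebp.
  // A construct call leaves a smi-encoded CONSTRUCT marker in its frame's
  // marker slot.  An arguments adaptor frame may sit in between; it is
  // recognized by the ARGUMENTS_ADAPTOR sentinel in its context slot and
  // skipped before the marker is checked.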
  // Get the frame pointer for the calling frame.
  Result fp = allocator()->Allocate();
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  fp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Result fp = allocator_->Allocate();
  Result result = allocator_->Allocate();
  ASSERT(fp.is_valid() && result.is_valid());

  Label exit;

  // Get the number of formal parameters.
  __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(result.reg(),
         Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  result.set_type_info(TypeInfo::Smi());
  if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
  frame_->Push(&result);
}


void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  JumpTarget leave, null, function, non_function_constructor;
  Load(args->at(0));  // Load the object.
  Result obj = frame_->Pop();
  obj.ToRegister();
  frame_->Spill(obj.reg());

  // If the object is a smi, we return null.
  __ test(obj.reg(), Immediate(kSmiTagMask));
  null.Branch(zero);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
  null.Branch(below);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
  function.Branch(equal);

  // Check if the constructor in the map is a function.
  { Result tmp = allocator()->Allocate();
    __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
    __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
    non_function_constructor.Branch(not_equal);
  }

  // The map register now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->Push(&obj);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  frame_->Push(Factory::function_class_symbol());
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  frame_->Push(Factory::Object_symbol());
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  frame_->Push(Factory::null_value());

  // All done.
  leave.Bind();
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  frame_->Dup();
  Result object = frame_->Pop();
  object.ToRegister();
  ASSERT(object.is_valid());
  // if (object->IsSmi()) return object.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, taken);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // if (!object->IsJSValue()) return object.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
  leave.Branch(not_equal, not_taken);
  __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
  object.Unuse();
  frame_->SetElementAt(0, &temp);
  leave.Bind();
}


void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  Load(args->at(1));  // Load the value.
  Result value = frame_->Pop();
  Result object = frame_->Pop();
  value.ToRegister();
  object.ToRegister();

  // if (object->IsSmi()) return value.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, &value, taken);

  // It is a heap object - get its map.
  Result scratch = allocator_->Allocate();
  ASSERT(scratch.is_valid());
  // if (!object->IsJSValue()) return value.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
  leave.Branch(not_equal, &value, not_taken);

  // Store the value.
  __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  Result duplicate_value = allocator_->Allocate();
  ASSERT(duplicate_value.is_valid());
  __ mov(duplicate_value.reg(), value.reg());
  // The object register is also overwritten by the write barrier and
  // possibly aliased in the frame.
  frame_->Spill(object.reg());
  __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
                 scratch.reg());
  object.Unuse();
  scratch.Unuse();
  duplicate_value.Unuse();

  // Leave.
  leave.Bind(&value);
  frame_->Push(&value);
}


void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  Load(args->at(0));
  Result key = frame_->Pop();
  // Explicitly create a constant result.
  Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  Result result = frame_->CallStub(&stub, &key, &count);
  frame_->Push(&result);
}


void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Result right = frame_->Pop();
  Result left = frame_->Pop();
  right.ToRegister();
  left.ToRegister();
  __ cmp(right.reg(), Operand(left.reg()));
  right.Unuse();
  left.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  STATIC_ASSERT(kSmiTag == 0);  // EBP value is aligned, so it looks like a Smi.
  Result ebp_as_smi = allocator_->Allocate();
  ASSERT(ebp_as_smi.is_valid());
  __ mov(ebp_as_smi.reg(), Operand(ebp));
  frame_->Push(&ebp_as_smi);
}


void CodeGenerator::GenerateRandomHeapNumber(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  frame_->SpillAll();

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(0, ebx);
  __ CallCFunction(ExternalReference::random_uint32_function(), 0);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  //   (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
  // This is implemented on both SSE2 and FPU.
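  // A worked sketch of the trick (my annotation): 0x4130000000000000 is the
  // bit pattern of the double 1.0 x 2^20.  XOR-ing the 32 random bits r
  // into the (zero) low half of the mantissa produces (1 + r/2^52) x 2^20,
  // i.e. 2^20 + r/2^32.  Subtracting 1.0 x 2^20 then leaves r/2^32, a
  // uniform value in [0, 1).  E.g. r == 0xFFFFFFFF yields (2^32 - 1)/2^32,
  // just below 1.0.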
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope fscope(SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, Operand(ebx));
    __ movd(xmm0, Operand(eax));
    __ cvtss2sd(xmm1, xmm1);
    __ pxor(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  __ mov(eax, edi);

  Result result = allocator_->Allocate(eax);
  frame_->Push(&result);
}


void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  SubStringStub stub;
  Result answer = frame_->CallStub(&stub, 3);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringCompareStub stub;
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
  ASSERT_EQ(4, args->length());

  // Load the arguments on the stack and call the stub.
  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));
  Load(args->at(3));
  RegExpExecStub stub;
  Result result = frame_->CallStub(&stub, 4);
  frame_->Push(&result);
}


void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
  // No stub. This code only occurs a few times in regexp.js.
  const int kMaxInlineLength = 100;
  ASSERT_EQ(3, args->length());
  Load(args->at(0));  // Size of array, smi.
  Load(args->at(1));  // "index" property value.
  Load(args->at(2));  // "input" property value.
  {
    VirtualFrame::SpilledScope spilled_scope;

    Label slowcase;
    Label done;
    __ mov(ebx, Operand(esp, kPointerSize * 2));
    __ test(ebx, Immediate(kSmiTagMask));
    __ j(not_zero, &slowcase);
    __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
    __ j(above, &slowcase);
    // Smi-tagging is equivalent to multiplying by 2.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
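    // Smi representation on ia32, for reference (my annotation): a smi
    // stores a 31-bit integer v as the word (v << 1), with a zero tag in
    // the low bit, so a tagged smi is numerically 2 * v.  That is what lets
    // the smi length below act as a pre-doubled element count.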
    // Allocate RegExpResult followed by FixedArray with size in ebx.
    // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
    // Elements:  [Map][Length][..elements..]
    __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                          times_half_pointer_size,
                          ebx,  // In: Number of elements (times 2, being a smi)
                          eax,  // Out: Start of allocation (tagged).
                          ecx,  // Out: End of allocation.
                          edx,  // Scratch register
                          &slowcase,
                          TAG_OBJECT);
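    // Size arithmetic, spelled out (my annotation): ebx holds the element
    // count n as a smi, i.e. the value 2 * n.  Scaling that by
    // times_half_pointer_size (x2) turns it into 4 * n bytes - n pointers -
    // so the request is kSize + kHeaderSize + n * kPointerSize.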
    // eax: Start of allocated area, object-tagged.

    // Set JSArray map to global.regexp_result_map().
    // Set empty properties FixedArray.
    // Set elements to point to FixedArray allocated right after the JSArray.
    // Interleave operations for better latency.
    __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
    __ mov(ecx, Immediate(Factory::empty_fixed_array()));
    __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
    __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
    __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
    __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
    __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);

    // Set input, index and length fields from arguments.
    __ pop(FieldOperand(eax, JSRegExpResult::kInputOffset));
    __ pop(FieldOperand(eax, JSRegExpResult::kIndexOffset));
    __ pop(ecx);
    __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);

    // Fill out the elements FixedArray.
    // eax: JSArray.
    // ebx: FixedArray.
    // ecx: Number of elements in array, as smi.

    // Set map.
    __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    // Set length.
    __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
    // Fill the contents of the fixed array with the hole value.
    __ SmiUntag(ecx);
    __ mov(edx, Immediate(Factory::the_hole_value()));
    __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
    // Fill fixed array elements with hole.
    // eax: JSArray.
    // ecx: Number of elements to fill.
    // ebx: Start of elements in FixedArray.
    // edx: the hole.
    Label loop;
    __ test(ecx, Operand(ecx));
    __ bind(&loop);
    __ j(less_equal, &done);  // Jump if ecx is negative or zero.
    __ sub(Operand(ecx), Immediate(1));
    __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
    __ jmp(&loop);

    __ bind(&slowcase);
    __ CallRuntime(Runtime::kRegExpConstructResult, 3);

    __ bind(&done);
  }
  frame_->Forget(3);
  frame_->Push(eax);
}


class DeferredSearchCache: public DeferredCode {
 public:
  DeferredSearchCache(Register dst, Register cache, Register key)
      : dst_(dst), cache_(cache), key_(key) {
    set_comment("[ DeferredSearchCache");
  }

  virtual void Generate();

 private:
  Register dst_;    // On invocation, holds the smi index of the finger; on
                    // exit, holds the value that was looked up.
  Register cache_;  // Instance of JSFunctionResultCache.
  Register key_;    // The key being looked up.
};


void DeferredSearchCache::Generate() {
  Label first_loop, search_further, second_loop, cache_miss;

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
  Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);
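  // Cache layout assumed by this code (my summary; see the
  // JSFunctionResultCache definition for the authoritative constants): the
  // cache is a FixedArray whose leading slots hold the factory function,
  // the finger (a smi index into the entries), and the current cache size,
  // followed by (key, value) pairs, giving kEntrySize == 2.  The search
  // walks the pairs from the finger down to the first entry, then from the
  // end of the cache down to the finger, moving the finger to any hit.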

  // Check the cache from finger to start of the cache.
  __ bind(&first_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
  __ j(less, &search_further);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &first_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&search_further);

  // Check the cache from end of cache up to finger.
  __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));

  __ bind(&second_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  // Consider prefetching into some reg.
  __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
  __ j(less_equal, &cache_miss);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &second_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&cache_miss);
  __ push(cache_);  // store a reference to cache
  __ push(key_);  // store a key
  __ push(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ push(key_);
  // On ia32 function must be in edi.
  __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
  ParameterCount expected(1);
  __ InvokeFunction(edi, expected, CALL_FUNCTION);

  // Find a place to put the new cached value.
  Label add_new_entry, update_cache;
  __ mov(ecx, Operand(esp, kPointerSize));  // restore the cache
  // Possible optimization: cache size is constant for the given cache
  // so technically we could use a constant here.  However, if we have
  // a cache miss this optimization would hardly matter much.

  // Check if we can add a new entry to the cache.
  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ j(greater, &add_new_entry);

  // Check if we can evict the entry after the finger.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(kEntrySizeSmi));
  __ cmp(ebx, Operand(edx));
  __ j(greater, &update_cache);

  // Need to wrap over the cache.
  __ mov(edx, Immediate(kEntriesIndexSmi));
  __ jmp(&update_cache);

  __ bind(&add_new_entry);
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);

  // Update the cache itself.
  // edx holds the index.
  __ bind(&update_cache);
  __ pop(ebx);  // restore the key
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
  // Store key.
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  // Store value.
  __ pop(ecx);  // restore the cache.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(Smi::FromInt(1)));
  __ mov(ebx, eax);
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  if (!dst_.is(eax)) {
    __ mov(dst_, eax);
  }
}


void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    frame_->Push(Factory::undefined_value());
    return;
  }

  Load(args->at(1));
  Result key = frame_->Pop();
  key.ToRegister();

  Result cache = allocator()->Allocate();
  ASSERT(cache.is_valid());
  __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
  __ mov(cache.reg(),
         ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
                                                          cache.reg(),
                                                          key.reg());

  // tmp.reg() now holds finger offset as a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp.reg(), FieldOperand(cache.reg(),
                                 JSFunctionResultCache::kFingerOffset));
  __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
  deferred->Branch(not_equal);

  __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));

  deferred->BindExit();
  frame_->Push(&tmp);
}


void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  Load(args->at(0));
  NumberToStringStub stub;
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


class DeferredSwapElements: public DeferredCode {
 public:
  DeferredSwapElements(Register object, Register index1, Register index2)
      : object_(object), index1_(index1), index2_(index2) {
    set_comment("[ DeferredSwapElements");
  }

  virtual void Generate();

 private:
  Register object_, index1_, index2_;
};


void DeferredSwapElements::Generate() {
  __ push(object_);
  __ push(index1_);
  __ push(index2_);
  __ CallRuntime(Runtime::kSwapElements, 3);
}


void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
  // Note: this code assumes that the indices passed are within the
  // elements' bounds and refer to valid (not hole) values.
  Comment cmnt(masm_, "[ GenerateSwapElements");

  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  Result index2 = frame_->Pop();
  index2.ToRegister();

  Result index1 = frame_->Pop();
  index1.ToRegister();

  Result object = frame_->Pop();
  object.ToRegister();

  Result tmp1 = allocator()->Allocate();
  tmp1.ToRegister();
  Result tmp2 = allocator()->Allocate();
  tmp2.ToRegister();

  frame_->Spill(object.reg());
  frame_->Spill(index1.reg());
  frame_->Spill(index2.reg());

  DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
                                                            index1.reg(),
                                                            index2.reg());

  // Fetch the map and check if the array is in fast case.
  // Check that the object doesn't require security checks and
  // has no indexed interceptor.
  __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
  deferred->Branch(below);
  __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
            KeyedLoadIC::kSlowCaseBitFieldMask);
  deferred->Branch(not_zero);

  // Check that the object's elements are in fast case and writable.
  __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
  __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  deferred->Branch(not_equal);

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  // Check that both indices are smis.
  __ mov(tmp2.reg(), index1.reg());
  __ or_(tmp2.reg(), Operand(index2.reg()));
  __ test(tmp2.reg(), Immediate(kSmiTagMask));
  deferred->Branch(not_zero);

  // Bring the addresses into index1 and index2.
  __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
  __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));

  // Swap elements.
  __ mov(object.reg(), Operand(index1.reg(), 0));
  __ mov(tmp2.reg(), Operand(index2.reg(), 0));
  __ mov(Operand(index2.reg(), 0), object.reg());
  __ mov(Operand(index1.reg(), 0), tmp2.reg());

  Label done;
  __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
  // Possible optimization: do a check that both values are smis
  // (or them together and test against the smi mask).

  __ mov(tmp2.reg(), tmp1.reg());
  __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
  __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
  __ bind(&done);

  deferred->BindExit();
  frame_->Push(Factory::undefined_value());
}


void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateCallFunction");

  ASSERT(args->length() >= 2);

  int n_args = args->length() - 2;  // for receiver and function.
  Load(args->at(0));  // receiver
  for (int i = 0; i < n_args; i++) {
    Load(args->at(i + 1));
  }
  Load(args->at(n_args + 1));  // function
  Result result = frame_->CallJSFunction(n_args);
  frame_->Push(&result);
}


// Generates the Math.pow method.  Only handles special cases and
// branches to the runtime system for everything else.  Please note
// that this function assumes that the callsite has executed ToNumber
// on both arguments.
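// The fast paths below, summarized (my reading of the code, not an
// authoritative spec): a smi exponent with a smi or heap-number base is
// handled by repeated squaring; a heap-number exponent is handled only for
// the values -0.5 (reciprocal square root) and 0.5 (square root).  All
// other cases, including NaN or infinite inputs, fall back to the runtime.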
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));
  if (!CpuFeatures::IsSupported(SSE2)) {
    Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->Push(&res);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    Label allocate_return;
    // Load the two operands while leaving the values on the frame.
    frame()->Dup();
    Result exponent = frame()->Pop();
    exponent.ToRegister();
    frame()->Spill(exponent.reg());
    frame()->PushElementAt(1);
    Result base = frame()->Pop();
    base.ToRegister();
    frame()->Spill(base.reg());

    Result answer = allocator()->Allocate();
    ASSERT(answer.is_valid());
    ASSERT(!exponent.reg().is(base.reg()));
    JumpTarget call_runtime;

    // Save 1 in xmm3 - we need this several times later on.
    __ mov(answer.reg(), Immediate(1));
    __ cvtsi2sd(xmm3, Operand(answer.reg()));

    Label exponent_nonsmi;
    Label base_nonsmi;
    // If the exponent is a heap number, go to that specific case.
    __ test(exponent.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &exponent_nonsmi);
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_nonsmi);

    // Optimized version when y is an integer.
    Label powi;
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&powi);
    // The exponent is a smi and the base is a heap number.
    __ bind(&base_nonsmi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);

    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // Optimized version of pow if y is an integer.
    __ bind(&powi);
    __ SmiUntag(exponent.reg());

    // Save the exponent in base as we need to check whether it is negative
    // later.  We know that base and exponent are in different registers.
    __ mov(base.reg(), exponent.reg());

    // Get absolute value of exponent.
    Label no_neg;
    __ cmp(exponent.reg(), 0);
    __ j(greater_equal, &no_neg);
    __ neg(exponent.reg());
    __ bind(&no_neg);

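    // The loop below is binary exponentiation (square-and-multiply).  A
    // rough C equivalent for orientation (my annotation; e is the untagged
    // absolute value of the exponent):
    //   double acc = 1.0;       // xmm1
    //   double b = base;        // xmm0
    //   while (e != 0) {
    //     if (e & 1) acc *= b;  // the low bit lands in the carry flag
    //     b *= b;
    //     e >>= 1;
    //   }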
    // Load xmm1 with 1.
    __ movsd(xmm1, xmm3);
    Label while_true;
    Label no_multiply;

    __ bind(&while_true);
    __ shr(exponent.reg(), 1);
    __ j(not_carry, &no_multiply);
    __ mulsd(xmm1, xmm0);
    __ bind(&no_multiply);
    __ test(exponent.reg(), Operand(exponent.reg()));
    __ mulsd(xmm0, xmm0);
    __ j(not_zero, &while_true);

    // base has the original value of the exponent - if the exponent is
    // negative, return 1/result.
    __ test(base.reg(), Operand(base.reg()));
    __ j(positive, &allocate_return);
    // Special case if xmm1 has reached infinity.  0x7F800000 is
    // single-precision +Infinity; the unordered outcome of ucomisd also
    // sets the zero flag, so a NaN result takes this branch to the runtime
    // as well.
    __ mov(answer.reg(), Immediate(0x7F800000));
    __ movd(xmm0, Operand(answer.reg()));
    __ cvtss2sd(xmm0, xmm0);
    __ ucomisd(xmm0, xmm1);
    call_runtime.Branch(equal);
    __ divsd(xmm3, xmm1);
    __ movsd(xmm1, xmm3);
    __ jmp(&allocate_return);

    // The exponent (or both operands) is a heap number - no matter what, we
    // should now work on doubles.
    __ bind(&exponent_nonsmi);
    __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);
    __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
    // Test if the exponent is NaN (a NaN compares unordered with itself).
    __ ucomisd(xmm1, xmm1);
    call_runtime.Branch(parity_even);

    Label base_not_smi;
    Label handle_special_cases;
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_not_smi);
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&handle_special_cases);
    __ bind(&base_not_smi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);
    __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
    __ and_(answer.reg(), HeapNumber::kExponentMask);
    __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
    // The base is NaN or +/-Infinity if all exponent bits are set.
    call_runtime.Branch(greater_equal);
    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // The base is in xmm0 and the exponent is in xmm1.
    __ bind(&handle_special_cases);
    Label not_minus_half;
    // Test for -0.5.
    // Load xmm2 with -0.5.
    __ mov(answer.reg(), Immediate(0xBF000000));
    __ movd(xmm2, Operand(answer.reg()));
    __ cvtss2sd(xmm2, xmm2);
    // xmm2 now has -0.5.
    __ ucomisd(xmm2, xmm1);
    __ j(not_equal, &not_minus_half);

    // Calculates the reciprocal of the square root.
    // Note that 1/sqrt(x) = sqrt(1/x).
    __ divsd(xmm3, xmm0);
    __ movsd(xmm1, xmm3);
    __ sqrtsd(xmm1, xmm1);
    __ jmp(&allocate_return);

    // Test for 0.5.
    __ bind(&not_minus_half);
    // Load xmm2 with 0.5.
    // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
    __ addsd(xmm2, xmm3);
    // xmm2 now has 0.5.
    __ ucomisd(xmm2, xmm1);
    call_runtime.Branch(not_equal);
    // Calculates the square root.
    __ movsd(xmm1, xmm0);
    __ sqrtsd(xmm1, xmm1);

    JumpTarget done;
    Label failure, success;
    __ bind(&allocate_return);
    // Make a copy of the frame to enable us to handle allocation
    // failure after the JumpTarget jump.
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(answer.reg(), exponent.reg(),
                          base.reg(), &failure);
    __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
    // Remove the two original values from the frame - we only need those
    // in the case where we branch to runtime.
    frame()->Drop(2);
    exponent.Unuse();
    base.Unuse();
    done.Jump(&answer);
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    // If we experience an allocation failure we branch to runtime.
    __ bind(&failure);
    call_runtime.Bind();
    answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);

    done.Bind(&answer);
    frame()->Push(&answer);
  }
}


void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::SIN);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::COS);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


// Generates the Math.sqrt method.  Please note - this function assumes that
// the callsite has executed ToNumber on the argument.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));

  if (!CpuFeatures::IsSupported(SSE2)) {
    Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
    frame()->Push(&result);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    // Leave the original value on the frame if we need to call runtime.
    frame()->Dup();
    Result result = frame()->Pop();
    result.ToRegister();
    frame()->Spill(result.reg());
    Label runtime;
    Label non_smi;
    Label load_done;
    JumpTarget end;

    __ test(result.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &non_smi);
    __ SmiUntag(result.reg());
    __ cvtsi2sd(xmm0, Operand(result.reg()));
    __ jmp(&load_done);
    __ bind(&non_smi);
    __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    __ j(not_equal, &runtime);
    __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));

    __ bind(&load_done);
    __ sqrtsd(xmm0, xmm0);
    // A copy of the virtual frame to allow us to go to runtime after the
    // JumpTarget jump.
    Result scratch = allocator()->Allocate();
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);

    __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
    frame()->Drop(1);
    scratch.Unuse();
    end.Jump(&result);
    // We only branch to runtime if we have an allocation error.
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&runtime);
    result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);

    end.Bind(&result);
    frame()->Push(&result);
  }
}


void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());
  Load(args->at(0));
  Load(args->at(1));
  Result right_res = frame_->Pop();
  Result left_res = frame_->Pop();
  right_res.ToRegister();
  left_res.ToRegister();
  Result tmp_res = allocator()->Allocate();
  ASSERT(tmp_res.is_valid());
  Register right = right_res.reg();
  Register left = left_res.reg();
  Register tmp = tmp_res.reg();
  right_res.Unuse();
  left_res.Unuse();
  tmp_res.Unuse();
  __ cmp(left, Operand(right));
  destination()->true_target()->Branch(equal);
  // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(Operand(tmp), right);
  __ test(Operand(tmp), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
  destination()->false_target()->Branch(not_equal);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  destination()->false_target()->Branch(not_equal);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  destination()->Split(equal);
}


void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  if (FLAG_debug_code) {
    __ AbortIfNotString(value.reg());
  }

  __ test(FieldOperand(value.reg(), String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
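  // Note on polarity (my annotation): the hash field holds a cached array
  // index only when the kContainsCachedArrayIndexMask bits are all clear,
  // so a zero test result means the index is present - hence the
  // Split(zero) below answers true in exactly that case.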

  value.Unuse();
  destination()->Split(zero);
}


void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result string = frame_->Pop();
  string.ToRegister();
  if (FLAG_debug_code) {
    __ AbortIfNotString(string.reg());
  }

  Result number = allocator()->Allocate();
  ASSERT(number.is_valid());
  __ mov(number.reg(), FieldOperand(string.reg(), String::kHashFieldOffset));
  __ IndexFromHash(number.reg(), number.reg());
  string.Unuse();
  frame_->Push(&number);
}


void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
  ASSERT(!in_safe_int32_mode());
  if (CheckForInlineRuntimeCall(node)) {
    return;
  }

  ZoneList<Expression*>* args = node->arguments();
  Comment cmnt(masm_, "[ CallRuntime");
  Runtime::Function* function = node->function();

  if (function == NULL) {
    // Push the builtins object found in the current global object.
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), GlobalObject());
    __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
    frame_->Push(&temp);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  if (function == NULL) {
    // Call the JS runtime function.
    frame_->Push(node->name());
    Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
                                       arg_count,
                                       loop_nesting_);
    frame_->RestoreContextRegister();
    frame_->Push(&answer);
  } else {
    // Call the C runtime function.
    Result answer = frame_->CallRuntime(function, arg_count);
    frame_->Push(&answer);
  }
}


void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
    // Swap the true and false targets but keep the same actual label
    // as the fall through.
    destination()->Invert();
    LoadCondition(node->expression(), destination(), true);
    // Swap the labels back.
    destination()->Invert();

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    if (property != NULL) {
      Load(property->obj());
      Load(property->key());
      Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
      frame_->Push(&answer);
      return;
    }

    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (variable != NULL) {
      Slot* slot = variable->AsSlot();
      if (variable->is_global()) {
        LoadGlobal();
        frame_->Push(variable->name());
        Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
                                              CALL_FUNCTION, 2);
        frame_->Push(&answer);
        return;

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // Call the runtime to look up the context holding the named
        // variable.  Sync the virtual frame eagerly so we can push the
        // arguments directly into place.
        frame_->SyncRange(0, frame_->element_count() - 1);
        frame_->EmitPush(esi);
        frame_->EmitPush(Immediate(variable->name()));
        Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
        ASSERT(context.is_register());
        frame_->EmitPush(context.reg());
        context.Unuse();
        frame_->EmitPush(Immediate(variable->name()));
        Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
                                              CALL_FUNCTION, 2);
        frame_->Push(&answer);
        return;
      }

      // Default: the result of deleting non-global variables that were not
      // dynamically introduced is false.
      frame_->Push(Factory::false_value());

    } else {
      // Default: the result of deleting an expression is true.
      Load(node->expression());  // may have side-effects
      frame_->SetElementAt(0, Factory::true_value());
    }

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->Push(&answer);

  } else if (op == Token::VOID) {
    Expression* expression = node->expression();
    if (expression && expression->AsLiteral() && (
        expression->AsLiteral()->IsTrue() ||
        expression->AsLiteral()->IsFalse() ||
        expression->AsLiteral()->handle()->IsNumber() ||
        expression->AsLiteral()->handle()->IsString() ||
        expression->AsLiteral()->handle()->IsJSRegExp() ||
        expression->AsLiteral()->IsNull())) {
      // Omit evaluating the value of the primitive literal.
      // It will be discarded anyway, and can have no side effect.
      frame_->Push(Factory::undefined_value());
    } else {
      Load(node->expression());
      frame_->SetElementAt(0, Factory::undefined_value());
    }

  } else {
    if (in_safe_int32_mode()) {
      Visit(node->expression());
      Result value = frame_->Pop();
      ASSERT(value.is_untagged_int32());
      // Registers containing an int32 value are not multiply used.
      ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
      value.ToRegister();
      switch (op) {
        case Token::SUB: {
          __ neg(value.reg());
          if (node->no_negative_zero()) {
            // -MIN_INT is MIN_INT with the overflow flag set.
            unsafe_bailout_->Branch(overflow);
          } else {
            // MIN_INT and 0 both have bad negations: in both cases the
            // low 31 bits of the result are zero.
            __ test(value.reg(), Immediate(0x7FFFFFFF));
            unsafe_bailout_->Branch(zero);
          }
          break;
        }
        case Token::BIT_NOT: {
          __ not_(value.reg());
          break;
        }
        case Token::ADD: {
          // Unary plus has no effect on int32 values.
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
      frame_->Push(&value);
    } else {
      Load(node->expression());
      bool can_overwrite = node->expression()->ResultOverwriteAllowed();
      UnaryOverwriteMode overwrite =
          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
      bool no_negative_zero = node->expression()->no_negative_zero();
      switch (op) {
        case Token::NOT:
        case Token::DELETE:
        case Token::TYPEOF:
          UNREACHABLE();  // handled above
          break;

        case Token::SUB: {
          GenericUnaryOpStub stub(
              Token::SUB,
              overwrite,
              NO_UNARY_FLAGS,
              no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
          Result operand = frame_->Pop();
          Result answer = frame_->CallStub(&stub, &operand);
          answer.set_type_info(TypeInfo::Number());
          frame_->Push(&answer);
          break;
        }
        case Token::BIT_NOT: {
          // Smi check.
          JumpTarget smi_label;
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          if (operand_info.IsSmi()) {
            if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
            frame_->Spill(operand.reg());
            // Set smi tag bit. It will be reset by the not operation.
            __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
            __ not_(operand.reg());
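            // Why this works (my annotation): a smi encodes v as 2v.  The
            // lea sets the low tag bit, giving 2v + 1, and the not then
            // computes ~(2v + 1) = -2v - 2 = 2 * (~v), i.e. exactly the smi
            // encoding of ~v, with the tag bit back at zero.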
            Result answer = operand;
            answer.set_type_info(TypeInfo::Smi());
            frame_->Push(&answer);
          } else {
            __ test(operand.reg(), Immediate(kSmiTagMask));
            smi_label.Branch(zero, &operand, taken);

            GenericUnaryOpStub stub(Token::BIT_NOT,
                                    overwrite,
                                    NO_UNARY_SMI_CODE_IN_STUB);
            Result answer = frame_->CallStub(&stub, &operand);
            continue_label.Jump(&answer);

            smi_label.Bind(&answer);
            answer.ToRegister();
            frame_->Spill(answer.reg());
            // Set smi tag bit. It will be reset by the not operation.
            __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
            __ not_(answer.reg());

            continue_label.Bind(&answer);
            answer.set_type_info(TypeInfo::Integer32());
            frame_->Push(&answer);
          }
          break;
        }
        case Token::ADD: {
          // Smi check.
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          __ test(operand.reg(), Immediate(kSmiTagMask));
          continue_label.Branch(zero, &operand, taken);

          frame_->Push(&operand);
          Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
                                                CALL_FUNCTION, 1);

          continue_label.Bind(&answer);
          if (operand_info.IsSmi()) {
            answer.set_type_info(TypeInfo::Smi());
          } else if (operand_info.IsInteger32()) {
            answer.set_type_info(TypeInfo::Integer32());
          } else {
            answer.set_type_info(TypeInfo::Number());
          }
          frame_->Push(&answer);
          break;
        }
        default:
          UNREACHABLE();
      }
    }
  }
}


// The value in dst was optimistically incremented or decremented.  The
// result overflowed or was not smi tagged.  Undo the operation, call
// into the runtime to convert the argument to a number, and call the
// specialized add or subtract stub.  The result is left in dst.
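// Background on the optimistic scheme (my summary): the count-operation
// visitor applies the increment or decrement directly to the tagged value,
// betting that it is a smi and will not overflow; only when that bet fails
// does control reach this deferred code, which first undoes the raw
// adjustment before redoing the operation properly.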
class DeferredPrefixCountOperation: public DeferredCode {
 public:
  DeferredPrefixCountOperation(Register dst,
                               bool is_increment,
                               TypeInfo input_type)
      : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPrefixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


8228// The value in dst was optimistically incremented or decremented. The
8229// result overflowed or was not smi tagged. Undo the operation and call
8230// into the runtime to convert the argument to a number. Update the
8231// original value in old. Call the specialized add or subtract stub.
8232// The result is left in dst.
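// For example, 'y = x++' with x holding the maximum smi (2^30 - 1 on
// ia32) overflows the optimistic increment; this code undoes it, saves
// the original value as the result for 'y', and the stub produces the
// heap number 2^30 as the new 'x'.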
class DeferredPostfixCountOperation: public DeferredCode {
 public:
  DeferredPostfixCountOperation(Register dst,
                                Register old,
                                bool is_increment,
                                TypeInfo input_type)
      : dst_(dst),
        old_(old),
        is_increment_(is_increment),
        input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  Register old_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPostfixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    __ push(dst_);  // Save the input to use as the old value.
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    __ push(eax);  // Save the result of ToNumber to use as the old value.
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ pop(old_);
}


void CodeGenerator::VisitCountOperation(CountOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CountOperation");

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);

  // Postfix operations need a stack slot under the reference to hold
  // the old value while the new value is being stored.  This is so that
  // in the case that storing the new value requires a call, the old
  // value will be in the frame to be spilled.
  if (is_postfix) frame_->Push(Smi::FromInt(0));

  // A constant reference is not saved to, so a constant reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!is_postfix) frame_->Push(Smi::FromInt(0));
      return;
    }
    target.TakeValue();

    Result new_value = frame_->Pop();
    new_value.ToRegister();

    Result old_value;  // Only allocated in the postfix case.
    if (is_postfix) {
      // Allocate a temporary to preserve the old value.
      old_value = allocator_->Allocate();
      ASSERT(old_value.is_valid());
      __ mov(old_value.reg(), new_value.reg());

      // The return value for postfix operations is ToNumber(input).
      // Keep more precise type info if the input is some kind of
      // number already.  If the input is not a number we have to wait
      // for the deferred code to convert it.
      if (new_value.type_info().IsNumber()) {
        old_value.set_type_info(new_value.type_info());
      }
    }

    // Ensure the new value is writable.
    frame_->Spill(new_value.reg());

    Result tmp;
    if (new_value.is_smi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
    } else {
      // We don't know statically if the input is a smi.
      // In order to combine the overflow and the smi tag check, we need
      // to be able to allocate a byte register.  We attempt to do so
      // without spilling.  If we fail, we will generate separate overflow
      // and smi tag checks.
      // We allocate and clear a temporary byte register before performing
      // the count operation since clearing the register using xor will clear
      // the overflow flag.
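      // (Set() assembles a zero immediate as 'xor reg, reg', which is why
      // the clearing must happen before the add or sub below.)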
      tmp = allocator_->AllocateByteRegisterWithoutSpilling();
      if (tmp.is_valid()) {
        __ Set(tmp.reg(), Immediate(0));
      }
    }

    if (is_increment) {
      __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    } else {
      __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    }

    DeferredCode* deferred = NULL;
    if (is_postfix) {
      deferred = new DeferredPostfixCountOperation(new_value.reg(),
                                                   old_value.reg(),
                                                   is_increment,
                                                   new_value.type_info());
    } else {
      deferred = new DeferredPrefixCountOperation(new_value.reg(),
                                                  is_increment,
                                                  new_value.type_info());
    }

    if (new_value.is_smi()) {
      // In case we have a smi as input just check for overflow.
      deferred->Branch(overflow);
    } else {
      // If the count operation didn't overflow and the result is a valid
      // smi, we're done.  Otherwise, we jump to the deferred slow-case
      // code.
      // We combine the overflow and the smi tag check if we could
      // successfully allocate a temporary byte register.
      if (tmp.is_valid()) {
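        // setcc writes 1 to the byte register on overflow; or-ing in the
        // result makes the low bit nonzero if either the operation
        // overflowed or the result has its smi tag bit set, so a single
        // test of the tag bit catches both failure cases.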
        __ setcc(overflow, tmp.reg());
        __ or_(Operand(tmp.reg()), new_value.reg());
        __ test(tmp.reg(), Immediate(kSmiTagMask));
        tmp.Unuse();
        deferred->Branch(not_zero);
      } else {
        // Otherwise we test separately for overflow and smi tag.
        deferred->Branch(overflow);
        __ test(new_value.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      }
    }
    deferred->BindExit();

    // Postfix count operations return their input converted to
    // number.  The case when the input is already a number is covered
    // above in the allocation code for old_value.
    if (is_postfix && !new_value.type_info().IsNumber()) {
      old_value.set_type_info(TypeInfo::Number());
    }

    // The result of ++ or -- is an Integer32 if the
    // input is a smi.  Otherwise it is a number.
    if (new_value.is_smi()) {
      new_value.set_type_info(TypeInfo::Integer32());
    } else {
      new_value.set_type_info(TypeInfo::Number());
    }

    // Postfix: store the old value in the allocated slot under the
    // reference.
    if (is_postfix) frame_->SetElementAt(target.size(), &old_value);

    frame_->Push(&new_value);
    // Non-constant: update the reference.
    if (!is_const) target.SetValue(NOT_CONST_INIT);
  }

  // Postfix: drop the new value and use the old.
  if (is_postfix) frame_->Drop();
}


void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
  Token::Value op = node->op();
  Comment cmnt(masm_, "[ Int32BinaryOperation");
  ASSERT(in_safe_int32_mode());
  ASSERT(safe_int32_mode_enabled());
  ASSERT(FLAG_safe_int32_compiler);

  if (op == Token::COMMA) {
    // Discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  ASSERT(right.is_untagged_int32());
  ASSERT(left.is_untagged_int32());
  // Registers containing an int32 value are not multiply used.
  ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
  ASSERT(!right.is_register() || !frame_->is_used(right.reg()));

  switch (op) {
    case Token::COMMA:
    case Token::OR:
    case Token::AND:
      UNREACHABLE();
      break;
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
      if (left.is_constant() || right.is_constant()) {
        int32_t value;  // Put constant in value, non-constant in left.
        // Constants are known to be int32 values, from static analysis,
        // or else will be converted to int32 by implicit ECMA [[ToInt32]].
        if (left.is_constant()) {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        } else {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        }

        left.ToRegister();
        if (op == Token::BIT_OR) {
          __ or_(Operand(left.reg()), Immediate(value));
        } else if (op == Token::BIT_XOR) {
          __ xor_(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(Operand(left.reg()), Immediate(value));
        }
      } else {
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::BIT_OR) {
          __ or_(left.reg(), Operand(right.reg()));
        } else if (op == Token::BIT_XOR) {
          __ xor_(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(left.reg(), Operand(right.reg()));
        }
      }
      frame_->Push(&left);
      right.Unuse();
      break;
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      bool test_shr_overflow = false;
      left.ToRegister();
      if (right.is_constant()) {
        ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
        int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
        if (op == Token::SAR) {
          __ sar(left.reg(), shift_amount);
        } else if (op == Token::SHL) {
          __ shl(left.reg(), shift_amount);
        } else {
          ASSERT(op == Token::SHR);
          __ shr(left.reg(), shift_amount);
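          // For shift amounts 1-31, shr clears the top bit, so the result
          // always fits in an Int32.  A zero shift changes neither the
          // value nor the flags, so only that case needs the explicit
          // sign test emitted below.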
          if (shift_amount == 0) test_shr_overflow = true;
        }
      } else {
        // Move right to ecx.
        if (left.is_register() && left.reg().is(ecx)) {
          right.ToRegister();
          __ xchg(left.reg(), right.reg());
          left = right;  // Left is unused here, copy of right unused by Push.
        } else {
          right.ToRegister(ecx);
          left.ToRegister();
        }
        if (op == Token::SAR) {
          __ sar_cl(left.reg());
        } else if (op == Token::SHL) {
          __ shl_cl(left.reg());
        } else {
          ASSERT(op == Token::SHR);
          __ shr_cl(left.reg());
          test_shr_overflow = true;
        }
      }
      {
        Register left_reg = left.reg();
        frame_->Push(&left);
        right.Unuse();
        if (test_shr_overflow && !node->to_int32()) {
          // Uint32 results with the top bit set are not Int32 values.
          // If they will be forced to Int32, skip the test.
          // The test is needed because shr with shift amount 0 does not
          // set the flags.
          __ test(left_reg, Operand(left_reg));
          unsafe_bailout_->Branch(sign);
        }
      }
      break;
    }
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
      if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
        int32_t value;  // Put constant in value, non-constant in left.
        if (right.is_constant()) {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        } else {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        }

        left.ToRegister();
        if (op == Token::ADD) {
          __ add(Operand(left.reg()), Immediate(value));
        } else if (op == Token::SUB) {
          __ sub(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::MUL);
          __ imul(left.reg(), left.reg(), value);
        }
      } else {
        left.ToRegister();
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::ADD) {
          __ add(left.reg(), Operand(right.reg()));
        } else if (op == Token::SUB) {
          __ sub(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::MUL);
          // We have statically verified that a negative zero can be ignored.
          __ imul(left.reg(), Operand(right.reg()));
        }
      }
      right.Unuse();
      frame_->Push(&left);
      if (!node->to_int32() || op == Token::MUL) {
        // If ToInt32 is called on the result of ADD or SUB, we don't care
        // about overflows.  The result of MUL, however, can fail to be
        // representable precisely as a double, so we do have to check for
        // overflow.
        unsafe_bailout_->Branch(overflow);
      }
      break;
    case Token::DIV:
    case Token::MOD: {
      if (right.is_register() && (right.reg().is(eax) || right.reg().is(edx))) {
        if (left.is_register() && left.reg().is(edi)) {
          right.ToRegister(ebx);
        } else {
          right.ToRegister(edi);
        }
      }
      left.ToRegister(eax);
      Result edx_reg = allocator_->Allocate(edx);
      right.ToRegister();
      // The results are unused here because BreakTarget::Branch cannot
      // handle live results.
      Register right_reg = right.reg();
      left.Unuse();
      right.Unuse();
      edx_reg.Unuse();
      __ cmp(right_reg, 0);
      // Ensure the divisor is positive: no chance of a non-int32 or -0
      // result.
      unsafe_bailout_->Branch(less_equal);
      __ cdq();  // Sign-extend eax into edx:eax.
      __ idiv(right_reg);
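      // idiv leaves the quotient in eax and the remainder in edx.  With a
      // positive divisor, a zero remainder means the quotient has the
      // dividend's sign, which the MOD path below uses to detect -0.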
      if (op == Token::MOD) {
        // Negative zero can arise as a negative dividend with a zero result.
        if (!node->no_negative_zero()) {
          Label not_negative_zero;
          __ test(edx, Operand(edx));
          __ j(not_zero, &not_negative_zero);
          __ test(eax, Operand(eax));
          unsafe_bailout_->Branch(negative);
          __ bind(&not_negative_zero);
        }
        Result edx_result(edx, TypeInfo::Integer32());
        edx_result.set_untagged_int32(true);
        frame_->Push(&edx_result);
      } else {
        ASSERT(op == Token::DIV);
        __ test(edx, Operand(edx));
        unsafe_bailout_->Branch(not_equal);
        Result eax_result(eax, TypeInfo::Integer32());
        eax_result.set_untagged_int32(true);
        frame_->Push(&eax_result);
      }
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions.  This means that the value
  // produced by a && or || operator is not necessarily a boolean.
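  // For example, ('' || 'right') evaluates to the string 'right' and
  // (0 && f()) evaluates to the number 0, not to a boolean.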

  // NOTE: If the left hand side produces a materialized value (not
  // control flow), we force the right hand side to do the same.  This
  // is necessary because we assume that if we get control flow on the
  // last path out of an expression we got it on all paths.
  if (node->op() == Token::AND) {
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_true;
    ControlDestination dest(&is_true, destination()->false_target(), true);
    LoadCondition(node->left(), &dest, false);

    if (dest.false_was_fall_through()) {
      // The current false target was used as the fall-through.  If
      // there are no dangling jumps to is_true then the left
      // subexpression was unconditionally false.  Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_true.is_linked()) {
        // We need to compile the right subexpression.  If the jump to
        // the current false target was a forward jump then we have a
        // valid frame, we have just bound the false target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->false_target()->Unuse();
          destination()->false_target()->Jump();
        }
        is_true.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have actually just jumped to or bound the current false
        // target but the current control destination is not marked as
        // used.
        destination()->Use(false);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_true
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths.  There are possibly also jumps to is_true
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value.  The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&pop_and_continue, &exit, true);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else {
    ASSERT(node->op() == Token::OR);
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_false;
    ControlDestination dest(destination()->true_target(), &is_false, false);
    LoadCondition(node->left(), &dest, false);

    if (dest.true_was_fall_through()) {
      // The current true target was used as the fall-through.  If
      // there are no dangling jumps to is_false then the left
      // subexpression was unconditionally true.  Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_false.is_linked()) {
        // We need to compile the right subexpression.  If the jump to
        // the current true target was a forward jump then we have a
        // valid frame, we have just bound the true target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->true_target()->Unuse();
          destination()->true_target()->Jump();
        }
        is_false.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have just jumped to or bound the current true target but
        // the current control destination is not marked as used.
        destination()->Use(true);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_false
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths.  There are possibly also jumps to is_false
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value.  The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&exit, &pop_and_continue, false);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }
  }
}


void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else if (in_safe_int32_mode()) {
    Visit(node->left());
    Visit(node->right());
    Int32BinaryOperation(node);
  } else {
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
    OverwriteMode overwrite_mode = NO_OVERWRITE;
    if (node->left()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_LEFT;
    } else if (node->right()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_RIGHT;
    }

    if (node->left()->IsTrivial()) {
      Load(node->right());
      Result right = frame_->Pop();
      frame_->Push(node->left());
      frame_->Push(&right);
    } else {
      Load(node->left());
      Load(node->right());
    }
    GenericBinaryOperation(node, overwrite_mode);
  }
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
  ASSERT(!in_safe_int32_mode());
  frame_->PushFunction();
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareOperation");

  bool left_already_loaded = false;

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();
  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Result answer = frame_->Pop();
    answer.ToRegister();

    if (check->Equals(Heap::number_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ cmp(answer.reg(), Factory::heap_number_map());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::string_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable string object.
      Result temp = allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
      temp.Unuse();
      answer.Unuse();
      destination()->Split(below);

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ cmp(answer.reg(), Factory::true_value());
      destination()->true_target()->Branch(equal);
      __ cmp(answer.reg(), Factory::false_value());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ cmp(answer.reg(), Factory::undefined_value());
      destination()->true_target()->Branch(equal);

      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable object.
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      answer.Unuse();
      destination()->Split(not_zero);

    } else if (check->Equals(Heap::function_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
      destination()->true_target()->Branch(equal);
      // Regular expressions are callable so typeof == 'function'.
      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
      answer.Unuse();
      destination()->Split(equal);
    } else if (check->Equals(Heap::object_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      __ cmp(answer.reg(), Factory::null_value());
      destination()->true_target()->Branch(equal);

      Result map = allocator()->Allocate();
      ASSERT(map.is_valid());
      // Regular expressions are typeof == 'function', not 'object'.
      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
      destination()->false_target()->Branch(equal);

      // It can be an undetectable object.
      __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      // Do a range test for JSObject type.  We can't use
      // MacroAssembler::IsInstanceJSObjectType, because we are using a
      // ControlDestination, so we copy its implementation here.
      __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
      __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
      __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
      answer.Unuse();
      map.Unuse();
      destination()->Split(below_equal);
    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      answer.Unuse();
      destination()->Goto(false);
    }
    return;
  } else if (op == Token::LT &&
             right->AsLiteral() != NULL &&
             right->AsLiteral()->handle()->IsHeapNumber()) {
    Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
    if (check->value() == 2147483648.0) {  // 0x80000000.
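      // Every smi is below 2^31, and for a heap number the upper word
      // (sign and exponent bits) decides the comparison on its own, so
      // the full double comparison can be skipped.  Negative NaN and
      // -Infinity cannot be classified this cheaply and take the
      // generic fallback path below.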
      Load(left);
      left_already_loaded = true;
      Result lhs = frame_->Pop();
      lhs.ToRegister();
      __ test(lhs.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);  // All Smis are less.
      Result scratch = allocator()->Allocate();
      ASSERT(scratch.is_valid());
      __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
      __ cmp(scratch.reg(), Factory::heap_number_map());
      JumpTarget not_a_number;
      not_a_number.Branch(not_equal, &lhs);
      __ mov(scratch.reg(),
             FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
      __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
      not_a_number.Branch(above_equal, &lhs);  // It's a negative NaN or -Inf.
      const uint32_t borderline_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
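      // 2^31 has unbiased exponent 31, so any double whose upper word
      // compares (signed) below this bound is less than 2147483648.0;
      // the signed compare also accepts every negative double.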
      __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
      scratch.Unuse();
      lhs.Unuse();
      destination()->true_target()->Branch(less);
      destination()->false_target()->Jump();

      not_a_number.Bind(&lhs);
      frame_->Push(&lhs);
    }
  }

  Condition cc = no_condition;
  bool strict = false;
  switch (op) {
    case Token::EQ_STRICT:
      strict = true;
      // Fall through.
    case Token::EQ:
      cc = equal;
      break;
    case Token::LT:
      cc = less;
      break;
    case Token::GT:
      cc = greater;
      break;
    case Token::LTE:
      cc = less_equal;
      break;
    case Token::GTE:
      cc = greater_equal;
      break;
    case Token::IN: {
      if (!left_already_loaded) Load(left);
      Load(right);
      Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
      frame_->Push(&answer);  // Push the result.
      return;
    }
    case Token::INSTANCEOF: {
      if (!left_already_loaded) Load(left);
      Load(right);
      InstanceofStub stub;
      Result answer = frame_->CallStub(&stub, 2);
      answer.ToRegister();
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      destination()->Split(zero);
      return;
    }
    default:
      UNREACHABLE();
  }

  if (left->IsTrivial()) {
    if (!left_already_loaded) {
      Load(right);
      Result right_result = frame_->Pop();
      frame_->Push(left);
      frame_->Push(&right_result);
    } else {
      Load(right);
    }
  } else {
    if (!left_already_loaded) Load(left);
    Load(right);
  }
  Comparison(node, cc, strict, destination());
}


void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Result operand = frame_->Pop();
  operand.ToRegister();
  __ cmp(operand.reg(), Factory::null_value());
  if (node->is_strict()) {
    operand.Unuse();
    destination()->Split(equal);
  } else {
    // The 'null' value is only equal to 'undefined' if using non-strict
    // comparisons.
    destination()->true_target()->Branch(equal);
    __ cmp(operand.reg(), Factory::undefined_value());
    destination()->true_target()->Branch(equal);
    __ test(operand.reg(), Immediate(kSmiTagMask));
    destination()->false_target()->Branch(equal);

    // It can be an undetectable object.
    // Use a scratch register in preference to spilling operand.reg().
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(),
           FieldOperand(operand.reg(), HeapObject::kMapOffset));
    __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    temp.Unuse();
    operand.Unuse();
    destination()->Split(not_zero);
  }
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
      && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
      && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
      && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
      && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
}
#endif

// Emit a LoadIC call to get the value from receiver and leave it in
// dst.
class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  DeferredReferenceGetNamedValue(Register dst,
                                 Register receiver,
                                 Handle<String> name,
                                 bool is_contextual)
      : dst_(dst),
        receiver_(receiver),
        name_(name),
        is_contextual_(is_contextual),
        is_dont_delete_(false) {
    set_comment(is_contextual
                ? "[ DeferredReferenceGetNamedValue (contextual)"
                : "[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

  void set_is_dont_delete(bool value) {
    ASSERT(is_contextual_);
    is_dont_delete_ = value;
  }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Handle<String> name_;
  bool is_contextual_;
  bool is_dont_delete_;
};


void DeferredReferenceGetNamedValue::Generate() {
  if (!receiver_.is(eax)) {
    __ mov(eax, receiver_);
  }
  __ Set(ecx, Immediate(name_));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  RelocInfo::Mode mode = is_contextual_
      ? RelocInfo::CODE_TARGET_CONTEXT
      : RelocInfo::CODE_TARGET;
  __ call(ic, mode);
  // The call must be followed by:
  // - a test eax instruction to indicate that the inobject property
  //   case was inlined.
  // - a mov ecx or mov edx instruction to indicate that the
  //   contextual property load was inlined.
  //
  // Store the delta to the map check instruction here in the test
  // instruction.  Use masm_-> instead of the __ macro since the
  // latter can't return a value.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  if (is_contextual_) {
    masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
    __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
    if (is_dont_delete_) {
      __ IncrementCounter(&Counters::dont_delete_hint_miss, 1);
    }
  } else {
    masm_->test(eax, Immediate(-delta_to_patch_site));
    __ IncrementCounter(&Counters::named_load_inline_miss, 1);
  }

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetKeyedValue(Register dst,
                                          Register receiver,
                                          Register key)
      : dst_(dst), receiver_(receiver), key_(key) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  if (!receiver_.is(eax)) {
    // Register eax is available for key.
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (!key_.is(edx)) {
    // Register edx is available for receiver.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
  } else {
    __ xchg(edx, eax);
  }
  // Calculate the delta from the IC call instruction to the map check
  // cmp instruction in the inlined version.  This delta is stored in
  // a test(eax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the cmp instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction.  We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value.  This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
                                 Register scratch)
      : value_(value),
        key_(key),
        receiver_(receiver),
        scratch_(scratch) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Register scratch_;
  Label patch_site_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
  // Move value_ to eax, key_ to ecx, and receiver_ to edx.
  Register old_value = value_;

  // First, move value to eax.
  if (!value_.is(eax)) {
    if (key_.is(eax)) {
      // Move key_ out of eax, preferably to ecx.
      if (!value_.is(ecx) && !receiver_.is(ecx)) {
        __ mov(ecx, key_);
        key_ = ecx;
      } else {
        __ mov(scratch_, key_);
        key_ = scratch_;
      }
    }
    if (receiver_.is(eax)) {
      // Move receiver_ out of eax, preferably to edx.
      if (!value_.is(edx) && !key_.is(edx)) {
        __ mov(edx, receiver_);
        receiver_ = edx;
      } else {
        // Both moves to scratch are from eax; no valid path hits both.
        __ mov(scratch_, receiver_);
        receiver_ = scratch_;
      }
    }
    __ mov(eax, value_);
    value_ = eax;
  }

  // Now value_ is in eax.  Move the other two to the right positions.
  // We do not update the variables key_ and receiver_ to ecx and edx.
  if (key_.is(ecx)) {
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (key_.is(edx)) {
    if (receiver_.is(ecx)) {
      __ xchg(edx, ecx);
    } else {
      __ mov(ecx, key_);
      if (!receiver_.is(edx)) {
        __ mov(edx, receiver_);
      }
    }
  } else {  // Key is not in edx or ecx.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    __ mov(ecx, key_);
  }

  // Call the IC stub.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction.  We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value.  This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  // Restore the value register (it holds the store IC's return value).
  if (!old_value.is(eax)) __ mov(old_value, eax);
}


Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif

  bool contextual_load_in_builtin =
      is_contextual &&
      (Bootstrapper::IsActive() ||
       (!info_->closure().is_null() && info_->closure()->IsBuiltin()));

  Result result;
  // Do not inline in global code or when not in a loop.
  if (scope()->is_global_scope() ||
      loop_nesting() == 0 ||
      contextual_load_in_builtin) {
    Comment cmnt(masm(), "[ Load from named Property");
    frame()->Push(name);

    RelocInfo::Mode mode = is_contextual
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    result = frame()->CallLoadIC(mode);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined.  Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the property load.
    Comment cmnt(masm(), is_contextual
        ? "[ Inlined contextual property load"
        : "[ Inlined named property load");
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    result = allocator()->Allocate();
    ASSERT(result.is_valid());
    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(result.reg(),
                                           receiver.reg(),
                                           name,
                                           is_contextual);

    if (!is_contextual) {
      // Check that the receiver is a heap object.
      __ test(receiver.reg(), Immediate(kSmiTagMask));
      deferred->Branch(zero);
    }

    __ bind(deferred->patch_site());
    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    deferred->Branch(not_equal);

    // The delta from the patch label to the actual load must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
           LoadIC::kOffsetToLoadInstruction);

    if (is_contextual) {
      // Load the (initially invalid) cell and get its value.
      masm()->mov(result.reg(), Factory::null_value());
      if (FLAG_debug_code) {
        __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
               Factory::global_property_cell_map());
        __ Assert(equal, "Uninitialized inlined contextual load");
      }
      __ mov(result.reg(),
             FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
      bool is_dont_delete = false;
      if (!info_->closure().is_null()) {
        // When doing lazy compilation we can check if the global cell
        // already exists and use its "don't delete" status as a hint.
        AssertNoAllocation no_gc;
        v8::internal::GlobalObject* global_object =
            info_->closure()->context()->global();
        LookupResult lookup;
        global_object->LocalLookupRealNamedProperty(*name, &lookup);
        if (lookup.IsProperty() && lookup.type() == NORMAL) {
          ASSERT(lookup.holder() == global_object);
          ASSERT(global_object->property_dictionary()->ValueAt(
              lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
          is_dont_delete = lookup.IsDontDelete();
        }
      }
      deferred->set_is_dont_delete(is_dont_delete);
      if (!is_dont_delete) {
        __ cmp(result.reg(), Factory::the_hole_value());
        deferred->Branch(equal);
      } else if (FLAG_debug_code) {
        __ cmp(result.reg(), Factory::the_hole_value());
        __ Check(not_equal, "DontDelete cells can't contain the hole");
      }
      __ IncrementCounter(&Counters::named_load_global_inline, 1);
      if (is_dont_delete) {
        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1);
      }
    } else {
      // The initial (invalid) offset has to be large enough to force a 32-bit
      // instruction encoding to allow patching with an arbitrary offset.  Use
      // kMaxInt (minus kHeapObjectTag).
      int offset = kMaxInt;
      masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
      __ IncrementCounter(&Counters::named_load_inline, 1);
    }

    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}


Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    result = frame()->CallStoreIC(name, is_contextual);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined.  Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;
    Label patch_site;

    // Get the value and receiver from the stack.
    Result value = frame()->Pop();
    value.ToRegister();
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    // Allocate result register.
    result = allocator()->Allocate();
    ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    slow.Branch(zero, &value, &receiver);

    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    __ bind(&patch_site);
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    slow.Branch(not_equal, &value, &receiver);

    // The delta from the patch label to the store offset must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
           StoreIC::kOffsetToStoreInstruction);

    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset.  Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    __ mov(FieldOperand(receiver.reg(), offset), value.reg());
    __ mov(result.reg(), Operand(value.reg()));

    // Allocate scratch register for the write barrier.
    Result scratch = allocator()->Allocate();
    ASSERT(scratch.is_valid());

    // The write barrier clobbers all input registers, so spill the
    // receiver and the value.
    frame_->Spill(receiver.reg());
    frame_->Spill(value.reg());

    // If the receiver and the value share a register, allocate a new
    // register for the receiver.
    if (receiver.reg().is(value.reg())) {
      receiver = allocator()->Allocate();
      ASSERT(receiver.is_valid());
      __ mov(receiver.reg(), Operand(value.reg()));
    }

    // Update the write barrier.  To save instructions in the inlined
    // version we do not filter smis.
    Label skip_write_barrier;
    __ InNewSpace(receiver.reg(), value.reg(), equal, &skip_write_barrier);
    int delta_to_record_write = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ lea(scratch.reg(), Operand(receiver.reg(), offset));
    __ RecordWriteHelper(receiver.reg(), scratch.reg(), value.reg());
    if (FLAG_debug_code) {
      __ mov(receiver.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(value.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(scratch.reg(), Immediate(BitCast<int32_t>(kZapValue)));
    }
    __ bind(&skip_write_barrier);
    value.Unuse();
    scratch.Unuse();
    receiver.Unuse();
    done.Jump(&result);

    slow.Bind(&value, &receiver);
    frame()->Push(&receiver);
    frame()->Push(&value);
    result = frame()->CallStoreIC(name, is_contextual);
    // Encode the offset to the map check instruction and the offset
    // to the write barrier store address computation in a test eax
    // instruction.
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ test(eax,
            Immediate((delta_to_record_write << 16) | delta_to_patch_site));
    done.Bind(&result);
  }

  ASSERT_EQ(expected_height, frame()->height());
  return result;
}
9532
9533
9534Result CodeGenerator::EmitKeyedLoad() {
9535#ifdef DEBUG
9536 int original_height = frame()->height();
9537#endif
9538 Result result;
9539 // Inline array load code if inside of a loop. We do not know the
9540 // receiver map yet, so we initially generate the code with a check
9541 // against an invalid map. In the inline cache code, we patch the map
9542 // check if appropriate.
Leon Clarked91b9f72010-01-27 17:25:45 +00009543 if (loop_nesting() > 0) {
9544 Comment cmnt(masm_, "[ Inlined load from keyed Property");
9545
Leon Clarked91b9f72010-01-27 17:25:45 +00009546 // Use a fresh temporary to load the elements without destroying
9547 // the receiver which is needed for the deferred slow case.
9548 Result elements = allocator()->Allocate();
9549 ASSERT(elements.is_valid());
9550
Leon Clarkef7060e22010-06-03 12:02:55 +01009551 Result key = frame_->Pop();
9552 Result receiver = frame_->Pop();
9553 key.ToRegister();
9554 receiver.ToRegister();
9555
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009556 // If key and receiver are shared registers on the frame, their values will
9557 // be automatically saved and restored when going to deferred code.
9558 // The result is in elements, which is guaranteed non-shared.
Leon Clarked91b9f72010-01-27 17:25:45 +00009559 DeferredReferenceGetKeyedValue* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009560 new DeferredReferenceGetKeyedValue(elements.reg(),
Leon Clarked91b9f72010-01-27 17:25:45 +00009561 receiver.reg(),
Andrei Popescu402d9372010-02-26 13:31:12 +00009562 key.reg());
Leon Clarked91b9f72010-01-27 17:25:45 +00009563
Andrei Popescu402d9372010-02-26 13:31:12 +00009564 __ test(receiver.reg(), Immediate(kSmiTagMask));
9565 deferred->Branch(zero);

    // Check that the receiver has the expected map.
    // Initially, use an invalid map. The map is patched in the IC
    // initialization code.
    __ bind(deferred->patch_site());
    // Use masm-> here instead of the double underscore macro since extra
    // coverage code can interfere with the patching.
    masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
               Immediate(Factory::null_value()));
    deferred->Branch(not_equal);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Get the elements array from the receiver.
    __ mov(elements.reg(),
           FieldOperand(receiver.reg(), JSObject::kElementsOffset));
    __ AssertFastElements(elements.reg());

    // Check that the key is within bounds.
    __ cmp(key.reg(),
           FieldOperand(elements.reg(), FixedArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Load and check that the result is not the hole.
    // Key holds a smi.
    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
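    // (A smi is the integer value shifted left by kSmiTagSize == 1, so the
    // key register already holds index * 2; the times_2 scale below therefore
    // addresses index * 4 == index * kPointerSize on ia32.)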
    __ mov(elements.reg(),
           FieldOperand(elements.reg(),
                        key.reg(),
                        times_2,
                        FixedArray::kHeaderSize));
    result = elements;
    __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
    deferred->Branch(equal);
    __ IncrementCounter(&Counters::keyed_load_inline, 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load. The explicit nop instruction is here because
    // the push that follows might be peep-hole optimized away.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}


Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Generate an inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    Comment cmnt(masm(), "[ Inlined store to keyed Property");

    // Get the receiver, key and value into registers.
    result = frame()->Pop();
    Result key = frame()->Pop();
    Result receiver = frame()->Pop();

    Result tmp = allocator_->Allocate();
    ASSERT(tmp.is_valid());
    Result tmp2 = allocator_->Allocate();
    ASSERT(tmp2.is_valid());

    // Determine whether the value is a constant before putting it in a
    // register.
    bool value_is_constant = result.is_constant();

    // Make sure that value, key and receiver are in registers.
    result.ToRegister();
    key.ToRegister();
    receiver.ToRegister();

    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(result.reg(),
                                           key.reg(),
                                           receiver.reg(),
                                           tmp.reg());

    // Check that the receiver is not a smi.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Check that the receiver is a JSArray.
    __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
    deferred->Branch(not_equal);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ cmp(key.reg(),
           FieldOperand(receiver.reg(), JSArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Get the elements array from the receiver and check that it is not a
    // dictionary.
    __ mov(tmp.reg(),
           FieldOperand(receiver.reg(), JSArray::kElementsOffset));

    // Check whether it is possible to omit the write barrier. If the
    // elements array is in new space or the value written is a smi we can
    // safely update the elements array without a write barrier.
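    // (The write barrier only needs to record old-to-new pointers: a store
    // into a new-space elements array cannot create one, and a smi value is
    // not a heap pointer at all.)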
    Label in_new_space;
    __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
    if (!value_is_constant) {
      __ test(result.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    }

    __ bind(&in_new_space);
    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison. When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    deferred->Branch(not_equal);

    // Store the value.
    __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
    __ IncrementCounter(&Counters::keyed_store_inline, 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC();
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 3);
  return result;
}


#undef __
#define __ ACCESS_MASM(masm)


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>::cast(raw_name->handle());
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) cgen_->frame()->Dup();
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::TakeValue() {
  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot. For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can
  // reach here. Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


#undef __

#define __ masm.

MemCopyFunction CreateMemCopyFunction() {
  size_t actual_size;
  byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
                                                 &actual_size,
                                                 true));
  CHECK(buffer);
  HandleScope handles;
  MacroAssembler masm(buffer, static_cast<int>(actual_size));

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // The 32-bit C calling convention passes arguments on the stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address.

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  int stack_offset = 0;  // Update if we change the stack height.

  if (FLAG_debug_code) {
    __ cmp(Operand(esp, kSizeOffset + stack_offset),
           Immediate(kMinComplexMemCopy));
    Label ok;
    __ j(greater_equal, &ok);
    __ int3();
    __ bind(&ok);
  }
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope enable(SSE2);
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

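    // Copy the first 16 bytes unaligned, then advance src and dst by
    // edx = 16 - (dst & 15) bytes so that dst is 16-byte aligned for the
    // main copy loop; count shrinks by the bytes already covered.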
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(Operand(edx), Immediate(16));
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);

    // edi is now aligned. Check if esi is also aligned.
    Label unaligned_source;
    __ test(Operand(src), Immediate(0x0F));
    __ j(not_zero, &unaligned_source);
    {
      __ IncrementCounter(&Counters::memcopy_aligned, 1);
      // Copy loop for aligned source and destination.
      __ mov(edx, count);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqa(xmm0, Operand(src, 0x00));
        __ movdqa(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqa(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0xF);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
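      // (This final unaligned pair re-copies up to 16 bytes ending exactly
      // at dst + count; since count <= 15 here and at least 16 bytes were
      // already written, the overlap is harmless.)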

      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }
    __ Align(16);
    {
      // Copy loop for unaligned source and aligned destination.
      // If the source is not aligned, we can't read it as efficiently.
      __ bind(&unaligned_source);
      __ IncrementCounter(&Counters::memcopy_unaligned, 1);
      __ mov(edx, ecx);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqu(xmm0, Operand(src, 0x00));
        __ movdqu(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqu(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0x0F);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);

      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }
  } else {
    __ IncrementCounter(&Counters::memcopy_noxmm, 1);
    // SSE2 not supported. Unlikely to happen in practice.
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    __ cld();
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    // Copy the first word.
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);

    // Increment src and dst so that dst is word-aligned.
    __ mov(edx, dst);
    __ and_(edx, 0x03);
    __ neg(edx);
    __ add(Operand(edx), Immediate(4));  // edx = 4 - (dst & 3)
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
    // edi is now aligned; ecx holds the number of remaining bytes to copy.

    __ mov(edx, count);
    count = edx;
    __ shr(ecx, 2);  // Make word count instead of byte count.
    __ rep_movs();

    // At most 3 bytes left to copy. Copy 4 bytes at end of string.
    __ and_(count, 3);
    __ mov(eax, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, count, times_1, -4), eax);
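    // (As in the SSE2 path, this tail copy overlaps bytes that were already
    // copied; count <= 3 here, so the 4-byte store ends exactly at
    // dst + count.)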

    __ pop(esi);
    __ pop(edi);
    __ ret(0);
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  // Call the function from C++.
  return FUNCTION_CAST<MemCopyFunction>(buffer);
}
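
// (Illustrative usage sketch, not from the original file. Assuming
// MemCopyFunction is declared elsewhere along the lines of
//   typedef void (*MemCopyFunction)(void* dest, const void* src, size_t size);
// the stub is created once and then called like memcpy for copies of at
// least kMinComplexMemCopy bytes:
//   static MemCopyFunction memcopy_function = CreateMemCopyFunction();
//   memcopy_function(dest, src, size);
// )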

#undef __

} } // namespace v8::internal

#endif // V8_TARGET_ARCH_IA32