// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "codegen-inl.h"
#include "bootstrapper.h"
#include "code-stubs.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
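// Note: ACCESS_MASM expands to an access through the given MacroAssembler
// (effectively "masm->"), so the emitted instructions below read almost
// like assembly. The macro is redefined further down to route through the
// code generator's masm_ member instead of a function parameter.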

// -------------------------------------------------------------------------
// Platform-specific FrameRegisterState functions.

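// For each register, registers_[i] records how to preserve its value around
// a call: kIgnore (nothing to do), kPush (saved by pushing on the stack), or
// an ebp-relative byte offset of the frame slot holding the value. The
// kSyncedFlag bit marks values already written back to the frame, which
// therefore need no store in Save. (A summary inferred from Save/Restore
// below.)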
void FrameRegisterState::Save(MacroAssembler* masm) const {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void FrameRegisterState::Restore(MacroAssembler* masm) const {
  // Restore registers in reverse order, mirroring the order in which
  // Save pushed them onto the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


#undef __
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  frame_state_.Save(masm_);
}


void DeferredCode::RestoreRegisters() {
  frame_state_.Restore(masm_);
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->Save(masm);
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  frame_state_->Restore(masm);
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

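// Generate emits the complete body of a JS function: frame entry, stack
// slot allocation, an optional local context plus copying of
// context-allocated parameters, the arguments object, declarations, the
// function body, and finally the return sequence and any deferred code.
// (An overview of the code below.)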
void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  JumpTarget::set_compiling_deferred_code(false);

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ int3();
    }
#endif

    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();

    // Allocate the local context if needed.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and esi agree.
      if (FLAG_debug_code) {
        __ cmp(context.reg(), Operand(esi));
        __ Assert(equal, "Runtime::NewContext should end up in esi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, the value that ends up in
      // the context slot must be that of the last argument passed for
      // that parameter. This is a rare case so we don't check for it;
      // instead we rely on the copying order: such a parameter is copied
      // repeatedly into the same context location and thus the last
      // value is what is seen inside the function.
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope()->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ mov(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object. This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->Push(Factory::the_hole_value());
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }


    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->is_in_loop() ? 1 : 0);
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // Do not overwrite the context register.
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we
      // should always be at a function context. However, it is safe to
      // dereference because the function context of a function context is
      // itself. Before deleting this mov we should try to create a
      // counter-example first, though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}


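// Like SlotOperand, but for slots that may require a dynamic lookup: walks
// the context chain out to the slot's scope and branches to the slow path
// if an intervening context that calls eval has a non-NULL extension
// object, since such an extension could shadow the slot and invalidate a
// direct load.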
Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame. If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}


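// On ia32 a Smi is the 31-bit payload shifted left by one with a zero tag
// bit, so adding a register to itself is exactly Smi tagging: the untagged
// value 5 (0b101) becomes 0b1010, the Smi encoding of 5. The caller
// guarantees the result fits in 31 bits, so the add below cannot overflow.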
void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    __ add(value->reg(), Operand(value->reg()));
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}


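// Here the result may not fit in a Smi. The add below attempts Smi tagging;
// it overflows exactly when bits 30 and 31 of the original value disagree,
// in which case the original is recovered by an arithmetic shift right
// followed by flipping the sign bit, and the value is then boxed in a heap
// number via SSE2 or the x87 FPU.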
void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number
    // disagree.
    __ xor_(val, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU.
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    if (!CpuFeatures::IsSupported(SSE2)) {
      // Pop the value from the floating point stack.
      __ fstp(0);
    }
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int
  // computation, compile a SafeInt32 path together with a bailout path.
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      CpuFeatures::IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;
    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(Factory::false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(Factory::true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(Factory::true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(Factory::false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (e.g., the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(Factory::true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(Factory::false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObject());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObject());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObject());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


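// Decides how the arguments object is allocated: not at all if the function
// never references 'arguments', eagerly at function entry if the function
// has a heap-allocated context (lazy allocation would interfere with the
// uninitialized const tracking there), and lazily otherwise, using the hole
// value as a not-yet-allocated sentinel.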
ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
  return frame_->Pop();
}

// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code. Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it. Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it. Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


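// ToBoolean below has three paths, ordered by how much the static TypeInfo
// narrows the value: a known int32 needs only a zero test, a known number
// adds a floating-point compare against 0.0, and everything else checks the
// literal values before falling back to ToBooleanStub.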
// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_integer32()) {  // Also takes Smi case.
    Comment cmnt(masm_, "ONLY_INTEGER_32");
    if (FLAG_debug_code) {
      Label ok;
      __ AbortIfNotNumber(value.reg());
      __ test(value.reg(), Immediate(kSmiTagMask));
      __ j(zero, &ok);
      __ fldz();
      __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
      __ FCmp();
      __ j(not_zero, &ok);
      __ Abort("Smi was wrapped in HeapNumber in output from bitop");
      __ bind(&ok);
    }
    // In the integer32 case there are no Smis hidden in heap numbers, so we
    // need only test for Smi zero.
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    value.Unuse();
    dest->Split(not_zero);
  } else if (value.is_number()) {
    Comment cmnt(masm_, "ONLY_NUMBER");
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);
    __ fldz();
    __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
    __ FCmp();
    value.Unuse();
    dest->Split(not_zero);
  } else {
    // Fast case checks.
    // 'false' => false.
    __ cmp(value.reg(), Factory::false_value());
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ cmp(value.reg(), Factory::true_value());
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ cmp(value.reg(), Factory::undefined_value());
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ test(temp.reg(), Operand(temp.reg()));
    temp.Unuse();
    dest->Split(not_equal);
  }
}


// Perform or call the specialized stub for a binary operation. Requires the
// three registers left, right and dst to be distinct and spilled. This
// deferred operation has up to three entry points: The main one calls the
// runtime system. The second is for when the result is a non-Smi. The
// third is for when at least one of the inputs is non-Smi and we have SSE2.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
    ASSERT(!left.is(right));
  }

  virtual void Generate();

  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters()
  // and Exit().
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToAnswerOutOfRange(Condition cond);
  void JumpToConstantRhs(Condition cond, Smi* smi_value);
  Label* NonSmiInputLabel();

 private:
  void GenerateAnswerOutOfRange();
  void GenerateNonSmiInput();

  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
  Label answer_out_of_range_;
  Label non_smi_input_;
  Label constant_rhs_;
  Smi* smi_value_;
};


Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
  if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) {
    return &non_smi_input_;
  } else {
    return entry_label();
  }
}


void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) {
  __ j(cond, &answer_out_of_range_);
}


void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
                                                      Smi* smi_value) {
  smi_value_ = smi_value;
  __ j(cond, &constant_rhs_);
}


void DeferredInlineBinaryOperation::Generate() {
  // Registers are not saved implicitly for this stub, so we should not
  // tread on the registers that were not passed to us.
  if (CpuFeatures::IsSupported(SSE2) &&
      ((op_ == Token::ADD) ||
       (op_ == Token::SUB) ||
       (op_ == Token::MUL) ||
       (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    Exit();

    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  // Register spilling is not done implicitly for this stub.
  // We can't postpone it any more now though.
  SaveRegisters();

  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  RestoreRegisters();
  Exit();

  if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) {
    GenerateNonSmiInput();
  }
  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


void DeferredInlineBinaryOperation::GenerateNonSmiInput() {
  // We know at least one of the inputs was not a Smi.
  // This is a third entry point into the deferred code.
  // We may not overwrite left_ because we want to be able
  // to call the handling code for the non-Smi answer, and it
  // might want to overwrite the heap number in left_.
  ASSERT(!right_.is(dst_));
  ASSERT(!left_.is(dst_));
  ASSERT(!left_.is(right_));
  // This entry point is used for bit ops where the right hand side
  // is a constant Smi and the left hand side is a heap object. It
  // is also used for bit ops where both sides are unknown, but where
  // at least one of them is a heap object.
  bool rhs_is_constant = constant_rhs_.is_linked();
  // We can't generate code for both cases.
  ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked());

  if (FLAG_debug_code) {
    __ int3();  // We don't fall through into this code.
  }

  __ bind(&non_smi_input_);

  if (rhs_is_constant) {
    __ bind(&constant_rhs_);
    // In this case the input is a heap object and it is in the dst_ register.
    // The left_ and right_ registers have not been initialized yet.
    __ mov(right_, Immediate(smi_value_));
    __ mov(left_, Operand(dst_));
    if (!CpuFeatures::IsSupported(SSE2)) {
      __ jmp(entry_label());
      return;
    } else {
      CpuFeatures::Scope use_sse2(SSE2);
      __ JumpIfNotNumber(dst_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    }
  } else {
    // We know we have SSE2 here because otherwise the label is not linked
    // (see NonSmiInputLabel).
    CpuFeatures::Scope use_sse2(SSE2);
    // Handle the non-constant right hand side situation:
    if (left_info_.IsSmi()) {
      // Right is a heap object.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, dst_, right_info_, entry_label());
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_);
    } else if (right_info_.IsSmi()) {
      // Left is a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    } else {
      // Here we don't know if it's one or both that is a heap object.
      Label only_right_is_heap_object, got_both;
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_, &only_right_is_heap_object);
      // Left was a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_, &got_both);
      // Both were heap objects.
      __ rcl(right_, 1);  // Put tag back.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ jmp(&got_both);
      __ bind(&only_right_is_heap_object);
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ bind(&got_both);
    }
  }
  ASSERT(op_ == Token::BIT_AND ||
         op_ == Token::BIT_OR ||
         op_ == Token::BIT_XOR ||
         right_.is(ecx));
  switch (op_) {
    case Token::BIT_AND: __ and_(dst_, Operand(right_)); break;
    case Token::BIT_OR: __ or_(dst_, Operand(right_)); break;
    case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break;
    case Token::SHR: __ shr_cl(dst_); break;
    case Token::SAR: __ sar_cl(dst_); break;
    case Token::SHL: __ shl_cl(dst_); break;
    default: UNREACHABLE();
  }
  if (op_ == Token::SHR) {
    // Check that the *unsigned* result fits in a smi. Neither of
    // the two high-order bits can be set:
    // * 0x80000000: high bit would be lost when smi tagging.
    // * 0x40000000: this number would convert to negative when smi
    //   tagging.
    __ test(dst_, Immediate(0xc0000000));
    __ j(not_zero, &answer_out_of_range_);
  } else {
    // Check that the *signed* result fits in a smi.
    __ cmp(dst_, 0xc0000000);
    __ j(negative, &answer_out_of_range_);
  }
  __ SmiTag(dst_);
  Exit();
}


void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() {
  Label after_alloc_failure2;
  Label allocation_ok;
  __ bind(&after_alloc_failure2);
  // We have to allocate a number, causing a GC, while keeping hold of
  // the answer in dst_. The answer is not a Smi. We can't just call the
  // runtime shift function here because we already threw away the inputs.
  __ xor_(left_, Operand(left_));
  __ shl(dst_, 1);  // Put top bit in carry flag and Smi tag the low bits.
  __ rcr(left_, 1);  // Rotate with carry.
  __ push(dst_);  // Smi tagged low 31 bits.
  __ push(left_);  // 0 or 0x80000000, which is Smi tagged in both cases.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  if (!left_.is(eax)) {
    __ mov(left_, eax);
  }
  __ pop(right_);  // High bit.
  __ pop(dst_);  // Low 31 bits.
  __ shr(dst_, 1);  // Put 0 in top bit.
  __ or_(dst_, Operand(right_));
  __ jmp(&allocation_ok);

  // This is the second entry point to the deferred code. It is used only by
  // the bit operations.
  // The dst_ register has the answer. It is not Smi tagged. If mode_ is
  // OVERWRITE_LEFT then left_ must contain either an overwritable heap
  // number or a Smi.
  // Put a heap number pointer in left_.
  __ bind(&answer_out_of_range_);
  SaveRegisters();
  if (mode_ == OVERWRITE_LEFT) {
    __ test(left_, Immediate(kSmiTagMask));
    __ j(not_zero, &allocation_ok);
  }
  // This trashes right_.
  __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
  __ bind(&allocation_ok);
  if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) {
    CpuFeatures::Scope use_sse2(SSE2);
    ASSERT(Token::IsBitOp(op_));
    // Signed conversion.
    __ cvtsi2sd(xmm0, Operand(dst_));
    __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0);
  } else {
    if (op_ == Token::SHR) {
      __ push(Immediate(0));  // High word of unsigned value.
      __ push(dst_);
      __ fild_d(Operand(esp, 0));
      __ Drop(2);
    } else {
      ASSERT(Token::IsBitOp(op_));
      __ push(dst_);
      __ fild_s(Operand(esp, 0));  // Signed conversion.
      __ pop(dst_);
    }
    __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset));
  }
  __ mov(dst_, left_);
  RestoreRegisters();
  Exit();
}


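// Computes the static TypeInfo of the operation's result from what is
// statically known about its operands. For example, BIT_AND with a
// non-negative Smi constant on either side must yield a Smi: that operand's
// clear upper bits also clear the corresponding bits of the result, keeping
// it within the 31-bit Smi payload.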
Steve Block6ded16b2010-05-10 14:33:55 +01001283static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
1284 Token::Value op,
1285 const Result& right,
1286 const Result& left) {
1287 // Set TypeInfo of result according to the operation performed.
1288 // Rely on the fact that smis have a 31 bit payload on ia32.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01001289 STATIC_ASSERT(kSmiValueSize == 31);
Steve Block6ded16b2010-05-10 14:33:55 +01001290 switch (op) {
1291 case Token::COMMA:
1292 return right.type_info();
1293 case Token::OR:
1294 case Token::AND:
1295 // Result type can be either of the two input types.
1296 return operands_type;
1297 case Token::BIT_AND: {
1298 // Anding with positive Smis will give you a Smi.
1299 if (right.is_constant() && right.handle()->IsSmi() &&
1300 Smi::cast(*right.handle())->value() >= 0) {
1301 return TypeInfo::Smi();
1302 } else if (left.is_constant() && left.handle()->IsSmi() &&
1303 Smi::cast(*left.handle())->value() >= 0) {
1304 return TypeInfo::Smi();
1305 }
1306 return (operands_type.IsSmi())
1307 ? TypeInfo::Smi()
1308 : TypeInfo::Integer32();
1309 }
1310 case Token::BIT_OR: {
1311 // Oring with negative Smis will give you a Smi.
1312 if (right.is_constant() && right.handle()->IsSmi() &&
1313 Smi::cast(*right.handle())->value() < 0) {
1314 return TypeInfo::Smi();
1315 } else if (left.is_constant() && left.handle()->IsSmi() &&
1316 Smi::cast(*left.handle())->value() < 0) {
1317 return TypeInfo::Smi();
1318 }
1319 return (operands_type.IsSmi())
1320 ? TypeInfo::Smi()
1321 : TypeInfo::Integer32();
1322 }
1323 case Token::BIT_XOR:
1324 // Result is always a 32 bit integer. Smi property of inputs is preserved.
1325 return (operands_type.IsSmi())
1326 ? TypeInfo::Smi()
1327 : TypeInfo::Integer32();
1328 case Token::SAR:
1329 if (left.is_smi()) return TypeInfo::Smi();
1330 // Result is a smi if we shift by a constant >= 1, otherwise an integer32.
1331 // Shift amount is masked with 0x1F (ECMA standard 11.7.2).
1332 return (right.is_constant() && right.handle()->IsSmi()
1333 && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
1334 ? TypeInfo::Smi()
1335 : TypeInfo::Integer32();
1336 case Token::SHR:
1337 // Result is a smi if we shift by a constant >= 2, an integer32 if
1338 // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
1339 if (right.is_constant() && right.handle()->IsSmi()) {
1340 int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
1341 if (shift_amount > 1) {
1342 return TypeInfo::Smi();
1343 } else if (shift_amount > 0) {
1344 return TypeInfo::Integer32();
1345 }
1346 }
1347 return TypeInfo::Number();
1348 case Token::ADD:
1349 if (operands_type.IsSmi()) {
1350 // The Integer32 range is big enough to take the sum of any two Smis.
1351 return TypeInfo::Integer32();
1352 } else if (operands_type.IsNumber()) {
1353 return TypeInfo::Number();
1354 } else if (left.type_info().IsString() || right.type_info().IsString()) {
1355 return TypeInfo::String();
1356 } else {
1357 return TypeInfo::Unknown();
1358 }
1359 case Token::SHL:
1360 return TypeInfo::Integer32();
1361 case Token::SUB:
1362 // The Integer32 range is big enough to take the difference of any two
1363 // Smis.
1364 return (operands_type.IsSmi()) ?
1365 TypeInfo::Integer32() :
1366 TypeInfo::Number();
1367 case Token::MUL:
1368 case Token::DIV:
1369 case Token::MOD:
1370 // Result is always a number.
1371 return TypeInfo::Number();
1372 default:
1373 UNREACHABLE();
1374 }
1375 UNREACHABLE();
1376 return TypeInfo::Unknown();
1377}
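
// Worked example for the SHR cases above (illustrative only): the untagged
// payload of a smi lies in [-2^30, 2^30 - 1]. Shifting the most negative
// payload (bit pattern 0xC0000000) right by 2 gives 0x30000000, which always
// fits in a smi; shifting by 1 gives 0x60000000, which fits in an int32 but
// not in a smi; and shifting by 0 leaves the unsigned value 0xC0000000 =
// 3221225472, which exceeds INT32_MAX -- hence TypeInfo::Smi(), Integer32()
// and Number() respectively.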


void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          StringAddStub stub(NO_STRING_CHECK_LEFT_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        }
      } else if (right_is_string) {
        StringAddStub stub(NO_STRING_CHECK_RIGHT_IN_STUB);
        answer = frame_->CallStub(&stub, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}


Result CodeGenerator::GenerateGenericBinaryOpStubCall(GenericBinaryOpStub* stub,
                                                      Result* left,
                                                      Result* right) {
  if (stub->ArgsInRegistersSupported()) {
    stub->SetArgsInRegisters();
    return frame_->CallStub(stub, left, right);
  } else {
    frame_->Push(left);
    frame_->Push(right);
    return frame_->CallStub(stub, 2);
  }
}


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
        if (answer != 0 || (left >= 0 && right >= 0)) {
          answer_object = Smi::FromInt(static_cast<int>(answer));
        }
      }
      break;
    }
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
      int shift_amount = right & 0x1F;
      if (Smi::IsValid(left << shift_amount)) {
        answer_object = Smi::FromInt(left << shift_amount);
      }
      break;
    }
    case Token::SHR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      unsigned_left >>= shift_amount;
      if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
        answer_object = Smi::FromInt(unsigned_left);
      }
      break;
    }
    case Token::SAR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      if (left < 0) {
        // Perform arithmetic shift of a negative number by
        // complementing number, logical shifting, complementing again.
        unsigned_left = ~unsigned_left;
        unsigned_left >>= shift_amount;
        unsigned_left = ~unsigned_left;
      } else {
        unsigned_left >>= shift_amount;
      }
      ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
      answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object == Heap::undefined_value()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}
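
// Worked example of the SAR complement trick above (illustrative only): C++
// leaves >> on negative signed values implementation-defined, so the fold
// uses unsigned logical shifts instead. For left = -5 and shift_amount = 1:
// ~(-5) = 4, then 4 >> 1 = 2, then ~2 = -3, which matches the arithmetic
// result -5 >> 1 = -3 (rounding towards negative infinity).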


void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
                                               Result* right,
                                               JumpTarget* both_smi) {
  TypeInfo left_info = left->type_info();
  TypeInfo right_info = right->type_info();
  if (left_info.IsDouble() || left_info.IsString() ||
      right_info.IsDouble() || right_info.IsString()) {
    // We know that left and right are not both smi. Don't do any tests.
    return;
  }

  if (left->reg().is(right->reg())) {
    if (!left_info.IsSmi()) {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), left->reg());
      __ or_(temp.reg(), Operand(right->reg()));
      __ test(temp.reg(), Immediate(kSmiTagMask));
      temp.Unuse();
      both_smi->Branch(zero);
    } else {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    if (!right_info.IsSmi()) {
      __ test(right->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  }
}
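
// The OR trick used above, spelled out (illustrative only): smis carry a
// zero tag bit and heap object pointers a one, so (left | right) &
// kSmiTagMask is zero exactly when both tag bits are clear. E.g. the tagged
// values 0x00000008 (smi 4) and 0x0000000C (smi 6) OR to 0x0000000C, whose
// low bit is clear; if either operand were a heap pointer (a hypothetical
// address such as 0x08041231), the OR would have its low bit set and the
// both-smi branch would not be taken.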


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  DeferredCode* deferred) {
  JumpIfNotBothSmiUsingTypeInfo(left,
                                right,
                                scratch,
                                left_info,
                                right_info,
                                deferred->entry_label());
}


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  Label* on_not_smi) {
  if (left.is(right)) {
    if (!left_info.IsSmi()) {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left);
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      __ mov(scratch, left);
      __ or_(scratch, Operand(right));
      __ test(scratch, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left);
    if (!right_info.IsSmi()) {
      __ test(right, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  }
}


// Implements a binary operation using a deferred code object and some
// inline code to operate on smis quickly.
Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
                                               Result* left,
                                               Result* right,
                                               OverwriteMode overwrite_mode) {
  // Copy the type info because left and right may be overwritten.
  TypeInfo left_type_info = left->type_info();
  TypeInfo right_type_info = right->type_info();
  Token::Value op = expr->op();
  Result answer;
  // Special handling of div and mod because they use fixed registers.
  if (op == Token::DIV || op == Token::MOD) {
    // We need eax as the quotient register, edx as the remainder
    // register, neither left nor right in eax or edx, and left copied
    // to eax.
    Result quotient;
    Result remainder;
    bool left_is_in_eax = false;
    // Step 1: get eax for quotient.
    if ((left->is_register() && left->reg().is(eax)) ||
        (right->is_register() && right->reg().is(eax))) {
      // One or both is in eax. Use a fresh non-edx register for
      // them.
      Result fresh = allocator_->Allocate();
      ASSERT(fresh.is_valid());
      if (fresh.reg().is(edx)) {
        remainder = fresh;
        fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
      }
      if (left->is_register() && left->reg().is(eax)) {
        quotient = *left;
        *left = fresh;
        left_is_in_eax = true;
      }
      if (right->is_register() && right->reg().is(eax)) {
        quotient = *right;
        *right = fresh;
      }
      __ mov(fresh.reg(), eax);
    } else {
      // Neither left nor right is in eax.
      quotient = allocator_->Allocate(eax);
    }
    ASSERT(quotient.is_register() && quotient.reg().is(eax));
    ASSERT(!(left->is_register() && left->reg().is(eax)));
    ASSERT(!(right->is_register() && right->reg().is(eax)));

    // Step 2: get edx for remainder if necessary.
    if (!remainder.is_valid()) {
      if ((left->is_register() && left->reg().is(edx)) ||
          (right->is_register() && right->reg().is(edx))) {
        Result fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
        if (left->is_register() && left->reg().is(edx)) {
          remainder = *left;
          *left = fresh;
        }
        if (right->is_register() && right->reg().is(edx)) {
          remainder = *right;
          *right = fresh;
        }
        __ mov(fresh.reg(), edx);
      } else {
        // Neither left nor right is in edx.
        remainder = allocator_->Allocate(edx);
      }
    }
    ASSERT(remainder.is_register() && remainder.reg().is(edx));
    ASSERT(!(left->is_register() && left->reg().is(edx)));
    ASSERT(!(right->is_register() && right->reg().is(edx)));

    left->ToRegister();
    right->ToRegister();
    frame_->Spill(eax);
    frame_->Spill(edx);
    // DeferredInlineBinaryOperation requires all the registers that it is
    // told about to be spilled and distinct.
    Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);

    // Check that left and right are smi tagged.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          (op == Token::DIV) ? eax : edx,
                                          left->reg(),
                                          distinct_right.reg(),
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), edx,
                                  left_type_info, right_type_info, deferred);
    if (!left_is_in_eax) {
      __ mov(eax, left->reg());
    }
    // Sign extend eax into edx:eax.
    __ cdq();
    // Check for 0 divisor.
    __ test(right->reg(), Operand(right->reg()));
    deferred->Branch(zero);
    // Divide edx:eax by the right operand.
    __ idiv(right->reg());

    // Complete the operation.
    if (op == Token::DIV) {
      // Check for negative zero result. If result is zero, and divisor
      // is negative, return a floating point negative zero. The
      // virtual frame is unchanged in this block, so local control flow
      // can use a Label rather than a JumpTarget. If the context of this
      // expression will treat -0 like 0, do not do this test.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(left->reg(), Operand(left->reg()));
        __ j(not_zero, &non_zero_result);
        __ test(right->reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by the
      // idiv instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      deferred->Branch(equal);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      deferred->Branch(not_zero);
      // Tag the result and store it in the quotient register.
      __ SmiTag(eax);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = quotient;
    } else {
      ASSERT(op == Token::MOD);
      // Check for a negative zero result. If the result is zero, and
      // the dividend is negative, return a floating point negative
      // zero. The frame is unchanged in this block, so local control
      // flow can use a Label rather than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(edx, Operand(edx));
        __ j(not_zero, &non_zero_result, taken);
        __ test(left->reg(), Operand(left->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = remainder;
    }
    ASSERT(answer.is_valid());
    return answer;
  }
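
  // Worked example of the 0x40000000 check above (illustrative only): idiv
  // divides the tagged operands, and since both carry a zero tag bit the
  // quotient in eax is the untagged mathematical result. Dividing the most
  // negative smi -2^30 (tagged 0x80000000) by -1 (tagged 0xFFFFFFFE) leaves
  // eax == 0x40000000, i.e. 2^30, one past the largest smi 2^30 - 1, so the
  // deferred slow case must produce a heap number instead.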

  // Special handling of shift operations because they use fixed
  // registers.
  if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
    // Move left out of ecx if necessary.
    if (left->is_register() && left->reg().is(ecx)) {
      *left = allocator_->Allocate();
      ASSERT(left->is_valid());
      __ mov(left->reg(), ecx);
    }
    right->ToRegister(ecx);
    left->ToRegister();
    ASSERT(left->is_register() && !left->reg().is(ecx));
    ASSERT(right->is_register() && right->reg().is(ecx));
    if (left_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    }
    if (right_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
    }

    // We will modify right, it must be spilled.
    frame_->Spill(ecx);
    // DeferredInlineBinaryOperation requires all the registers that it is told
    // about to be spilled and distinct. We know that right is ecx and left is
    // not ecx.
    frame_->Spill(left->reg());

    // Use a fresh answer register to avoid spilling the left operand.
    answer = allocator_->Allocate();
    ASSERT(answer.is_valid());

    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          answer.reg(),
                                          left->reg(),
                                          ecx,
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());

    // Untag both operands.
    __ mov(answer.reg(), left->reg());
    __ SmiUntag(answer.reg());
    __ SmiUntag(right->reg());  // Right is ecx.

    // Perform the operation.
    ASSERT(right->reg().is(ecx));
    switch (op) {
      case Token::SAR: {
        __ sar_cl(answer.reg());
        if (!left_type_info.IsSmi()) {
          // Check that the *signed* result fits in a smi.
          __ cmp(answer.reg(), 0xc0000000);
          deferred->JumpToAnswerOutOfRange(negative);
        }
        break;
      }
      case Token::SHR: {
        __ shr_cl(answer.reg());
        // Check that the *unsigned* result fits in a smi. Neither of
        // the two high-order bits can be set:
        // * 0x80000000: high bit would be lost when smi tagging.
        // * 0x40000000: this number would convert to negative when smi
        //   tagging.
        // These two cases can only happen with shifts by 0 or 1 when
        // handed a valid smi. If the answer cannot be represented by a
        // smi, restore the left and right arguments, and jump to the slow
        // case. The low bit of the left argument may be lost, but only
        // in a case where it is dropped anyway.
        __ test(answer.reg(), Immediate(0xc0000000));
        deferred->JumpToAnswerOutOfRange(not_zero);
        break;
      }
      case Token::SHL: {
        __ shl_cl(answer.reg());
        // Check that the *signed* result fits in a smi.
        __ cmp(answer.reg(), 0xc0000000);
        deferred->JumpToAnswerOutOfRange(negative);
        break;
      }
      default:
        UNREACHABLE();
    }
    // Smi-tag the result in answer.
    __ SmiTag(answer.reg());
    deferred->BindExit();
    left->Unuse();
    right->Unuse();
    ASSERT(answer.is_valid());
    return answer;
  }
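
  // Worked example of the cmp-with-0xc0000000 range check above (illustrative
  // only): an untagged int32 v fits in a smi iff v is in [-2^30, 2^30 - 1].
  // The subtraction v - 0xc0000000 sets the sign flag exactly when v lies in
  // [0x40000000, 0xBFFFFFFF] as an unsigned value, i.e. when v >= 2^30 or
  // v < -2^30 as a signed one. For example v = 0x20000000 (2^29) passes,
  // while v = 0x40000000 (2^30) takes the out-of-range branch.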

  // Handle the other binary operations.
  left->ToRegister();
  right->ToRegister();
  // DeferredInlineBinaryOperation requires all the registers that it is told
  // about to be spilled.
  Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);
  // A newly allocated register answer is used to hold the answer. The
  // registers containing left and right are not modified so they don't
  // need to be spilled in the fast case.
  answer = allocator_->Allocate();
  ASSERT(answer.is_valid());

  // Perform the smi tag check.
  DeferredInlineBinaryOperation* deferred =
      new DeferredInlineBinaryOperation(op,
                                        answer.reg(),
                                        left->reg(),
                                        distinct_right.reg(),
                                        left_type_info,
                                        right_type_info,
                                        overwrite_mode);
  Label non_smi_bit_op;
  if (op != Token::BIT_OR) {
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());
  }

  __ mov(answer.reg(), left->reg());
  switch (op) {
    case Token::ADD:
      __ add(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::SUB:
      __ sub(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::MUL: {
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // Remove smi tag from the left operand (but keep sign).
      // Left-hand operand has been copied into answer.
      __ SmiUntag(answer.reg());
      // Do multiplication of smis, leaving result in answer.
      __ imul(answer.reg(), Operand(right->reg()));
      // Go slow on overflows.
      deferred->Branch(overflow);
      // Check for negative zero result. If product is zero, and one
      // argument is negative, go to slow case. The frame is unchanged
      // in this block, so local control flow can use a Label rather
      // than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(answer.reg(), Operand(answer.reg()));
        __ j(not_zero, &non_zero_result, taken);
        __ mov(answer.reg(), left->reg());
        __ or_(answer.reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ xor_(answer.reg(), Operand(answer.reg()));  // Positive 0 is correct.
        __ bind(&non_zero_result);
      }
      break;
    }

    case Token::BIT_OR:
      __ or_(answer.reg(), Operand(right->reg()));
      __ test(answer.reg(), Immediate(kSmiTagMask));
      __ j(not_zero, deferred->NonSmiInputLabel());
      break;

    case Token::BIT_AND:
      __ and_(answer.reg(), Operand(right->reg()));
      break;

    case Token::BIT_XOR:
      __ xor_(answer.reg(), Operand(right->reg()));
      break;

    default:
      UNREACHABLE();
      break;
  }

  deferred->BindExit();
  left->Unuse();
  right->Unuse();
  ASSERT(answer.is_valid());
  return answer;
}
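
// Worked example of the multiplication tag trick above (illustrative only):
// with kSmiTag == 0 a smi n is encoded as 2n, so untagging one operand and
// multiplying gives a * (2b) = 2(ab), which is already the tagged product.
// E.g. for 3 * 4 the registers hold 6 and 8; untagging the left gives 3, and
// imul produces 24 == SmiTag(12) with no retagging step needed.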


// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             TypeInfo type_info,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperation::Generate() {
  // For mod we don't generate all the Smi code inline.
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, src_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// Call the appropriate binary operation stub to compute value op src
// and leave the result in dst.
class DeferredInlineSmiOperationReversed: public DeferredCode {
 public:
  DeferredInlineSmiOperationReversed(Token::Value op,
                                     Register dst,
                                     Smi* value,
                                     Register src,
                                     TypeInfo type_info,
                                     OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        type_info_(type_info),
        value_(value),
        src_(src),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperationReversed");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  Register src_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperationReversed::Generate() {
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, value_, src_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src + value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAdd::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}
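
// Worked example of the optimistic add/undo pattern above (illustrative
// only): two's complement addition wraps deterministically, so subtracting
// the same immediate always restores the original operand even after an
// overflow. E.g. adding tagged 1 (0x00000002) to the largest smi 2^30 - 1
// (tagged 0x7FFFFFFE) wraps to 0x80000000 and sets the overflow flag; the
// deferred code subtracts 0x00000002 to recover 0x7FFFFFFE before calling
// the stub.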


// The result of value + src is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               TypeInfo type_info,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAddReversed::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, value_, dst_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src - value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative subtraction and call the
// appropriate specialized stub for subtract. The result is left in
// dst.
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiSub::Generate() {
  // Undo the optimistic sub operation and call the shared stub.
  __ add(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::SUB,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
                                                 Result* operand,
                                                 Handle<Object> value,
                                                 bool reversed,
                                                 OverwriteMode overwrite_mode) {
  // Generate inline code for a binary operation when one of the
  // operands is a constant smi. Consumes the argument "operand".
  if (IsUnsafeSmi(value)) {
    Result unsafe_operand(value);
    if (reversed) {
      return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
                                      overwrite_mode);
    } else {
      return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
                                      overwrite_mode);
    }
  }

  // Get the literal value.
  Smi* smi_value = Smi::cast(*value);
  int int_value = smi_value->value();

  Token::Value op = expr->op();
  Result answer;
  switch (op) {
    case Token::ADD: {
      operand->ToRegister();
      frame_->Spill(operand->reg());

      // Optimistically add. Call the specialized add stub if the
      // result is not a smi or overflows.
      DeferredCode* deferred = NULL;
      if (reversed) {
        deferred = new DeferredInlineSmiAddReversed(operand->reg(),
                                                    operand->type_info(),
                                                    smi_value,
                                                    overwrite_mode);
      } else {
        deferred = new DeferredInlineSmiAdd(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
      }
      __ add(Operand(operand->reg()), Immediate(value));
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred = NULL;
      if (reversed) {
        // The reversed case is only hit when the right operand is not a
        // constant.
        ASSERT(operand->is_register());
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        __ Set(answer.reg(), Immediate(value));
        deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   operand->reg(),
                                                   operand->type_info(),
                                                   overwrite_mode);
        __ sub(answer.reg(), Operand(operand->reg()));
      } else {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        answer = *operand;
        deferred = new DeferredInlineSmiSub(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
        __ sub(Operand(operand->reg()), Immediate(value));
      }
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(answer.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      operand->Unuse();
      break;
    }

    case Token::SAR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        frame_->Spill(operand->reg());
        if (!operand->type_info().IsSmi()) {
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
          deferred->BindExit();
        } else {
          if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
        }
        answer = *operand;
      }
      break;

    case Token::SHR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           answer.reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        if (!operand->type_info().IsSmi()) {
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(operand->reg());
        }
        __ mov(answer.reg(), operand->reg());
        __ SmiUntag(answer.reg());
        __ shr(answer.reg(), shift_value);
        // A negative Smi shifted right by two or more is in the positive
        // Smi range.
        if (shift_value < 2) {
          __ test(answer.reg(), Immediate(0xc0000000));
          deferred->Branch(not_zero);
        }
        operand->Unuse();
        __ SmiTag(answer.reg());
        deferred->BindExit();
      }
      break;

    case Token::SHL:
      if (reversed) {
        // Move operand into ecx and also into a second register.
        // If operand is already in a register, take advantage of that.
        // This lets us modify ecx, but still bail out to deferred code.
        Result right;
        Result right_copy_in_ecx;
        TypeInfo right_type_info = operand->type_info();
        operand->ToRegister();
        if (operand->reg().is(ecx)) {
          right = allocator()->Allocate();
          __ mov(right.reg(), ecx);
          frame_->Spill(ecx);
          right_copy_in_ecx = *operand;
        } else {
          right_copy_in_ecx = allocator()->Allocate(ecx);
          __ mov(ecx, operand->reg());
          right = *operand;
        }
        operand->Unuse();

        answer = allocator()->Allocate();
        DeferredInlineSmiOperationReversed* deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   right.reg(),
                                                   right_type_info,
                                                   overwrite_mode);
        __ mov(answer.reg(), Immediate(int_value));
        __ sar(ecx, kSmiTagSize);
        if (!right_type_info.IsSmi()) {
          deferred->Branch(carry);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(right.reg());
        }
        __ shl_cl(answer.reg());
        __ cmp(answer.reg(), 0xc0000000);
        deferred->Branch(sign);
        __ SmiTag(answer.reg());

        deferred->BindExit();
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        if (shift_value == 0) {
          // Spill operand so it can be overwritten in the slow case.
          frame_->Spill(operand->reg());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          deferred->BindExit();
          answer = *operand;
        } else {
          // Use a fresh temporary for nonzero shift values.
          answer = allocator()->Allocate();
          ASSERT(answer.is_valid());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             answer.reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          if (!operand->type_info().IsSmi()) {
            __ test(operand->reg(), Immediate(kSmiTagMask));
            deferred->Branch(not_zero);
          } else if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          __ mov(answer.reg(), operand->reg());
          STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
          // We do no shifts, only the Smi conversion, if shift_value is 1.
          if (shift_value > 1) {
            __ shl(answer.reg(), shift_value - 1);
          }
          // Convert int result to Smi, checking that it is in int range.
          STATIC_ASSERT(kSmiTagSize == 1);  // Adjust code if not the case.
          __ add(answer.reg(), Operand(answer.reg()));
          deferred->Branch(overflow);
          deferred->BindExit();
          operand->Unuse();
        }
      }
      break;

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      operand->ToRegister();
      // DeferredInlineBinaryOperation requires all the registers that it is
      // told about to be spilled.
      frame_->Spill(operand->reg());
      DeferredInlineBinaryOperation* deferred = NULL;
      if (!operand->type_info().IsSmi()) {
        Result left = allocator()->Allocate();
        ASSERT(left.is_valid());
        Result right = allocator()->Allocate();
        ASSERT(right.is_valid());
        deferred = new DeferredInlineBinaryOperation(
            op,
            operand->reg(),
            left.reg(),
            right.reg(),
            operand->type_info(),
            TypeInfo::Smi(),
            overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT);
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->JumpToConstantRhs(not_zero, smi_value);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      if (op == Token::BIT_AND) {
        __ and_(Operand(operand->reg()), Immediate(value));
      } else if (op == Token::BIT_XOR) {
        if (int_value != 0) {
          __ xor_(Operand(operand->reg()), Immediate(value));
        }
      } else {
        ASSERT(op == Token::BIT_OR);
        if (int_value != 0) {
          __ or_(Operand(operand->reg()), Immediate(value));
        }
      }
      if (deferred != NULL) deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::DIV:
      if (!reversed && int_value == 2) {
        operand->ToRegister();
        frame_->Spill(operand->reg());

        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check that the lowest log2(value) bits of the operand are zero,
        // and test the smi tag at the same time.
        STATIC_ASSERT(kSmiTag == 0);
        STATIC_ASSERT(kSmiTagSize == 1);
        __ test(operand->reg(), Immediate(3));
        deferred->Branch(not_zero);  // Branch if non-smi or odd smi.
        __ sar(operand->reg(), 1);
        deferred->BindExit();
        answer = *operand;
      } else {
        // Cannot fall through MOD to default case, so we duplicate the
        // default case here.
        Result constant_operand(value);
        if (reversed) {
          answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                            overwrite_mode);
        } else {
          answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                            overwrite_mode);
        }
      }
      break;

    // Generate inline code for mod of powers of 2 and negative powers of 2.
    case Token::MOD:
      if (!reversed &&
          int_value != 0 &&
          (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check for negative or non-Smi left hand side.
        __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
        deferred->Branch(not_zero);
        if (int_value < 0) int_value = -int_value;
        if (int_value == 1) {
          __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
        } else {
          __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
        }
        deferred->BindExit();
        answer = *operand;
        break;
      }
      // Fall through if we did not find a power of 2 on the right hand side!
      // The next case must be the default.

    default: {
      Result constant_operand(value);
      if (reversed) {
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                          overwrite_mode);
      }
      break;
    }
  }
  ASSERT(answer.is_valid());
  return answer;
}
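
// Worked example of the divide-by-2 fast path above (illustrative only):
// test(reg, Immediate(3)) checks the smi tag bit and the lowest payload bit
// in one instruction, so only even smis stay inline. For operand 6 (tagged
// 0x0000000C) the test yields zero and sar by 1 produces 0x00000006, i.e.
// smi 3, still correctly tagged; an odd smi or a heap object falls back to
// the deferred code.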


static bool CouldBeNaN(const Result& result) {
  if (result.type_info().IsSmi()) return false;
  if (result.type_info().IsInteger32()) return false;
  if (!result.is_constant()) return true;
  if (!result.handle()->IsHeapNumber()) return false;
  return isnan(HeapNumber::cast(*result.handle())->value());
}


// Convert from signed to unsigned comparison to match the way EFLAGS are set
// by FPU and XMM compare instructions.
static Condition DoubleCondition(Condition cc) {
  switch (cc) {
    case less: return below;
    case equal: return equal;
    case less_equal: return below_equal;
    case greater: return above;
    case greater_equal: return above_equal;
    default: UNREACHABLE();
  }
  UNREACHABLE();
  return equal;
}
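
// Background for the mapping above (illustrative only): ucomisd and fucomip
// report a double comparison through CF and ZF (CF for "below", ZF for
// "equal") rather than through SF and OF, the flags an integer cmp uses for
// signed conditions. A signed jump such as jl after a double compare would
// therefore read meaningless flags, so "less" must be tested as the
// unsigned "below".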
2645
2646
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002647static CompareFlags ComputeCompareFlags(NaNInformation nan_info,
2648 bool inline_number_compare) {
2649 CompareFlags flags = NO_SMI_COMPARE_IN_STUB;
2650 if (nan_info == kCantBothBeNaN) {
2651 flags = static_cast<CompareFlags>(flags | CANT_BOTH_BE_NAN);
2652 }
2653 if (inline_number_compare) {
2654 flags = static_cast<CompareFlags>(flags | NO_NUMBER_COMPARE_IN_STUB);
2655 }
2656 return flags;
2657}
2658
2659
Leon Clarkee46be812010-01-19 14:06:41 +00002660void CodeGenerator::Comparison(AstNode* node,
2661 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00002662 bool strict,
2663 ControlDestination* dest) {
2664 // Strict only makes sense for equality comparisons.
2665 ASSERT(!strict || cc == equal);
2666
2667 Result left_side;
2668 Result right_side;
2669 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
2670 if (cc == greater || cc == less_equal) {
2671 cc = ReverseCondition(cc);
2672 left_side = frame_->Pop();
2673 right_side = frame_->Pop();
2674 } else {
2675 right_side = frame_->Pop();
2676 left_side = frame_->Pop();
2677 }
2678 ASSERT(cc == less || cc == equal || cc == greater_equal);
2679
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002680 // If either side is a constant smi, optimize the comparison.
Leon Clarkee46be812010-01-19 14:06:41 +00002681 bool left_side_constant_smi = false;
2682 bool left_side_constant_null = false;
2683 bool left_side_constant_1_char_string = false;
2684 if (left_side.is_constant()) {
2685 left_side_constant_smi = left_side.handle()->IsSmi();
2686 left_side_constant_null = left_side.handle()->IsNull();
2687 left_side_constant_1_char_string =
2688 (left_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002689 String::cast(*left_side.handle())->length() == 1 &&
2690 String::cast(*left_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002691 }
2692 bool right_side_constant_smi = false;
2693 bool right_side_constant_null = false;
2694 bool right_side_constant_1_char_string = false;
2695 if (right_side.is_constant()) {
2696 right_side_constant_smi = right_side.handle()->IsSmi();
2697 right_side_constant_null = right_side.handle()->IsNull();
2698 right_side_constant_1_char_string =
2699 (right_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002700 String::cast(*right_side.handle())->length() == 1 &&
2701 String::cast(*right_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002702 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002703
2704 if (left_side_constant_smi || right_side_constant_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002705 bool is_loop_condition = (node->AsExpression() != NULL) &&
2706 node->AsExpression()->is_loop_condition();
2707 ConstantSmiComparison(cc, strict, dest, &left_side, &right_side,
2708 left_side_constant_smi, right_side_constant_smi,
2709 is_loop_condition);
Leon Clarkee46be812010-01-19 14:06:41 +00002710 } else if (left_side_constant_1_char_string ||
2711 right_side_constant_1_char_string) {
2712 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
2713 // Trivial case, comparing two constants.
2714 int left_value = String::cast(*left_side.handle())->Get(0);
2715 int right_value = String::cast(*right_side.handle())->Get(0);
2716 switch (cc) {
2717 case less:
2718 dest->Goto(left_value < right_value);
2719 break;
2720 case equal:
2721 dest->Goto(left_value == right_value);
2722 break;
2723 case greater_equal:
2724 dest->Goto(left_value >= right_value);
2725 break;
2726 default:
2727 UNREACHABLE();
2728 }
2729 } else {
2730      // Only one side is a constant 1-character string.
2731 // If left side is a constant 1-character string, reverse the operands.
2732 // Since one side is a constant string, conversion order does not matter.
2733 if (left_side_constant_1_char_string) {
2734 Result temp = left_side;
2735 left_side = right_side;
2736 right_side = temp;
2737 cc = ReverseCondition(cc);
2738 // This may reintroduce greater or less_equal as the value of cc.
2739 // CompareStub and the inline code both support all values of cc.
2740 }
2741 // Implement comparison against a constant string, inlining the case
2742 // where both sides are strings.
2743 left_side.ToRegister();
2744
2745 // Here we split control flow to the stub call and inlined cases
2746 // before finally splitting it to the control destination. We use
2747 // a jump target and branching to duplicate the virtual frame at
2748 // the first split. We manually handle the off-frame references
2749 // by reconstituting them on the non-fall-through path.
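      // (JumpTarget::Branch and Bind carry the Result operands with them,
      // so each path sees a consistent copy of the virtual frame.)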
2750 JumpTarget is_not_string, is_string;
2751 Register left_reg = left_side.reg();
2752 Handle<Object> right_val = right_side.handle();
Steve Block6ded16b2010-05-10 14:33:55 +01002753 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
Leon Clarkee46be812010-01-19 14:06:41 +00002754 __ test(left_side.reg(), Immediate(kSmiTagMask));
2755 is_not_string.Branch(zero, &left_side);
2756 Result temp = allocator_->Allocate();
2757 ASSERT(temp.is_valid());
2758 __ mov(temp.reg(),
2759 FieldOperand(left_side.reg(), HeapObject::kMapOffset));
2760 __ movzx_b(temp.reg(),
2761 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
2762 // If we are testing for equality then make use of the symbol shortcut.
2763      // Check that the left hand side has the same type as the right
2764      // hand side (which is always a symbol).
2765 if (cc == equal) {
2766 Label not_a_symbol;
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002767 STATIC_ASSERT(kSymbolTag != 0);
Leon Clarkee46be812010-01-19 14:06:41 +00002768 // Ensure that no non-strings have the symbol bit set.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002769 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
Leon Clarkee46be812010-01-19 14:06:41 +00002770 __ test(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
2771 __ j(zero, &not_a_symbol);
2772 // They are symbols, so do identity compare.
2773 __ cmp(left_side.reg(), right_side.handle());
2774 dest->true_target()->Branch(equal);
2775 dest->false_target()->Branch(not_equal);
2776 __ bind(&not_a_symbol);
2777 }
Steve Block6ded16b2010-05-10 14:33:55 +01002778      // Call the compare stub if the left side is not a flat ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002779 __ and_(temp.reg(),
2780 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2781 __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
2782 temp.Unuse();
2783 is_string.Branch(equal, &left_side);
2784
2785      // Set up and call the compare stub.
2786 is_not_string.Bind(&left_side);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002787 CompareFlags flags =
2788 static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_COMPARE_IN_STUB);
2789 CompareStub stub(cc, strict, flags);
Leon Clarkee46be812010-01-19 14:06:41 +00002790 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2791 result.ToRegister();
2792 __ cmp(result.reg(), 0);
2793 result.Unuse();
2794 dest->true_target()->Branch(cc);
2795 dest->false_target()->Jump();
2796
2797 is_string.Bind(&left_side);
Steve Block6ded16b2010-05-10 14:33:55 +01002798 // left_side is a sequential ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002799 left_side = Result(left_reg);
2800 right_side = Result(right_val);
Leon Clarkee46be812010-01-19 14:06:41 +00002801 // Test string equality and comparison.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002802 Label comparison_done;
Leon Clarkee46be812010-01-19 14:06:41 +00002803 if (cc == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00002804 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002805 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002806 __ j(not_equal, &comparison_done);
2807 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002808 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Leon Clarkee46be812010-01-19 14:06:41 +00002809 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2810 char_value);
Leon Clarkee46be812010-01-19 14:06:41 +00002811 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002812 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
2813 Immediate(Smi::FromInt(1)));
2814 // If the length is 0 then the jump is taken and the flags
2815 // correctly represent being less than the one-character string.
2816 __ j(below, &comparison_done);
Steve Block6ded16b2010-05-10 14:33:55 +01002817 // Compare the first character of the string with the
2818 // constant 1-character string.
Leon Clarkee46be812010-01-19 14:06:41 +00002819 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002820 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002821 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2822 char_value);
2823 __ j(not_equal, &comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002824 // If the first character is the same then the long string sorts after
2825 // the short one.
2826 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002827 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002828 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002829 __ bind(&comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002830 left_side.Unuse();
2831 right_side.Unuse();
2832 dest->Split(cc);
2833 }
2834 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002835 // Neither side is a constant Smi, constant 1-char string or constant null.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002836 // If either side is a non-smi constant, or known to be a heap number,
2837 // skip the smi check.
Steve Blocka7e24c12009-10-30 11:49:00 +00002838 bool known_non_smi =
2839 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
Steve Block6ded16b2010-05-10 14:33:55 +01002840 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
2841 left_side.type_info().IsDouble() ||
2842 right_side.type_info().IsDouble();
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002843
Leon Clarkee46be812010-01-19 14:06:41 +00002844 NaNInformation nan_info =
2845 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
2846 kBothCouldBeNaN :
2847 kCantBothBeNaN;
Steve Block6ded16b2010-05-10 14:33:55 +01002848
2849 // Inline number comparison handling any combination of smi's and heap
2850 // numbers if:
2851 // code is in a loop
2852 // the compare operation is different from equal
2853 // compare is not a for-loop comparison
2854 // The reason for excluding equal is that it will most likely be done
2855 // with smi's (not heap numbers) and the code to comparing smi's is inlined
2856 // separately. The same reason applies for for-loop comparison which will
2857 // also most likely be smi comparisons.
2858 bool is_loop_condition = (node->AsExpression() != NULL)
2859 && node->AsExpression()->is_loop_condition();
2860 bool inline_number_compare =
2861 loop_nesting() > 0 && cc != equal && !is_loop_condition;
2862
2863 // Left and right needed in registers for the following code.
Steve Blocka7e24c12009-10-30 11:49:00 +00002864 left_side.ToRegister();
2865 right_side.ToRegister();
2866
2867 if (known_non_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002868 // Inlined equality check:
2869 // If at least one of the objects is not NaN, then if the objects
2870 // are identical, they are equal.
Steve Block6ded16b2010-05-10 14:33:55 +01002871 if (nan_info == kCantBothBeNaN && cc == equal) {
2872 __ cmp(left_side.reg(), Operand(right_side.reg()));
2873 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00002874 }
Steve Block6ded16b2010-05-10 14:33:55 +01002875
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002876 // Inlined number comparison:
Steve Block6ded16b2010-05-10 14:33:55 +01002877 if (inline_number_compare) {
2878 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2879 }
2880
2881 // End of in-line compare, call out to the compare stub. Don't include
2882 // number comparison in the stub if it was inlined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002883 CompareFlags flags = ComputeCompareFlags(nan_info, inline_number_compare);
2884 CompareStub stub(cc, strict, flags);
Steve Block6ded16b2010-05-10 14:33:55 +01002885 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2886 __ test(answer.reg(), Operand(answer.reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002887 answer.Unuse();
2888 dest->Split(cc);
2889 } else {
2890 // Here we split control flow to the stub call and inlined cases
2891 // before finally splitting it to the control destination. We use
2892 // a jump target and branching to duplicate the virtual frame at
2893 // the first split. We manually handle the off-frame references
2894 // by reconstituting them on the non-fall-through path.
2895 JumpTarget is_smi;
2896 Register left_reg = left_side.reg();
2897 Register right_reg = right_side.reg();
2898
Steve Block6ded16b2010-05-10 14:33:55 +01002899 // In-line check for comparing two smis.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002900 JumpIfBothSmiUsingTypeInfo(&left_side, &right_side, &is_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01002901
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002902 if (has_valid_frame()) {
2903        // Inline the equality check if the operands can't both be NaN. If
2904        // the objects are identical, they are equal.
2905 if (nan_info == kCantBothBeNaN && cc == equal) {
2906 __ cmp(left_side.reg(), Operand(right_side.reg()));
2907 dest->true_target()->Branch(equal);
2908 }
2909
2910 // Inlined number comparison:
2911 if (inline_number_compare) {
2912 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2913 }
2914
2915 // End of in-line compare, call out to the compare stub. Don't include
2916 // number comparison in the stub if it was inlined.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002917 CompareFlags flags =
2918 ComputeCompareFlags(nan_info, inline_number_compare);
2919 CompareStub stub(cc, strict, flags);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002920 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2921 __ test(answer.reg(), Operand(answer.reg()));
2922 answer.Unuse();
2923 if (is_smi.is_linked()) {
2924 dest->true_target()->Branch(cc);
2925 dest->false_target()->Jump();
2926 } else {
2927 dest->Split(cc);
2928 }
2929 }
2930
2931 if (is_smi.is_linked()) {
2932 is_smi.Bind();
2933 left_side = Result(left_reg);
2934 right_side = Result(right_reg);
Steve Block6ded16b2010-05-10 14:33:55 +01002935 __ cmp(left_side.reg(), Operand(right_side.reg()));
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002936 right_side.Unuse();
2937 left_side.Unuse();
2938 dest->Split(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01002939 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002940 }
2941 }
2942}
2943
2944
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002945void CodeGenerator::ConstantSmiComparison(Condition cc,
2946 bool strict,
2947 ControlDestination* dest,
2948 Result* left_side,
2949 Result* right_side,
2950 bool left_side_constant_smi,
2951 bool right_side_constant_smi,
2952 bool is_loop_condition) {
2953 if (left_side_constant_smi && right_side_constant_smi) {
2954 // Trivial case, comparing two constants.
2955 int left_value = Smi::cast(*left_side->handle())->value();
2956 int right_value = Smi::cast(*right_side->handle())->value();
2957 switch (cc) {
2958 case less:
2959 dest->Goto(left_value < right_value);
2960 break;
2961 case equal:
2962 dest->Goto(left_value == right_value);
2963 break;
2964 case greater_equal:
2965 dest->Goto(left_value >= right_value);
2966 break;
2967 default:
2968 UNREACHABLE();
2969 }
2970 } else {
2971 // Only one side is a constant Smi.
2972 // If left side is a constant Smi, reverse the operands.
2973 // Since one side is a constant Smi, conversion order does not matter.
2974 if (left_side_constant_smi) {
2975 Result* temp = left_side;
2976 left_side = right_side;
2977 right_side = temp;
2978 cc = ReverseCondition(cc);
2979 // This may re-introduce greater or less_equal as the value of cc.
2980 // CompareStub and the inline code both support all values of cc.
2981 }
2982 // Implement comparison against a constant Smi, inlining the case
2983 // where both sides are Smis.
2984 left_side->ToRegister();
2985 Register left_reg = left_side->reg();
2986 Handle<Object> right_val = right_side->handle();
2987
2988 if (left_side->is_smi()) {
2989 if (FLAG_debug_code) {
2990 __ AbortIfNotSmi(left_reg);
2991 }
2992 // Test smi equality and comparison by signed int comparison.
2993 if (IsUnsafeSmi(right_side->handle())) {
2994 right_side->ToRegister();
2995 __ cmp(left_reg, Operand(right_side->reg()));
2996 } else {
2997 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
2998 }
2999 left_side->Unuse();
3000 right_side->Unuse();
3001 dest->Split(cc);
3002 } else {
3003      // Only the case remains where the left side might not be a smi.
3004 JumpTarget is_smi;
3005 if (cc == equal) {
3006 // We can do the equality comparison before the smi check.
3007 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3008 dest->true_target()->Branch(equal);
3009 __ test(left_reg, Immediate(kSmiTagMask));
3010 dest->false_target()->Branch(zero);
3011 } else {
3012 // Do the smi check, then the comparison.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003013 __ test(left_reg, Immediate(kSmiTagMask));
3014 is_smi.Branch(zero, left_side, right_side);
3015 }
3016
3017 // Jump or fall through to here if we are comparing a non-smi to a
3018 // constant smi. If the non-smi is a heap number and this is not
3019 // a loop condition, inline the floating point code.
3020 if (!is_loop_condition && CpuFeatures::IsSupported(SSE2)) {
3021 // Right side is a constant smi and left side has been checked
3022 // not to be a smi.
3023 CpuFeatures::Scope use_sse2(SSE2);
3024 JumpTarget not_number;
3025 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
3026 Immediate(Factory::heap_number_map()));
3027 not_number.Branch(not_equal, left_side);
3028 __ movdbl(xmm1,
3029 FieldOperand(left_reg, HeapNumber::kValueOffset));
3030 int value = Smi::cast(*right_val)->value();
3031 if (value == 0) {
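          // XORing the register with itself is the cheapest way to
          // materialize the double 0.0 for the comparison.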
3032 __ xorpd(xmm0, xmm0);
3033 } else {
3034 Result temp = allocator()->Allocate();
3035 __ mov(temp.reg(), Immediate(value));
3036 __ cvtsi2sd(xmm0, Operand(temp.reg()));
3037 temp.Unuse();
3038 }
3039 __ ucomisd(xmm1, xmm0);
3040 // Jump to builtin for NaN.
3041 not_number.Branch(parity_even, left_side);
3042 left_side->Unuse();
3043 dest->true_target()->Branch(DoubleCondition(cc));
3044 dest->false_target()->Jump();
3045 not_number.Bind(left_side);
3046 }
3047
3048      // Set up and call the compare stub.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003049 CompareFlags flags =
3050 static_cast<CompareFlags>(CANT_BOTH_BE_NAN | NO_SMI_CODE_IN_STUB);
3051 CompareStub stub(cc, strict, flags);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003052 Result result = frame_->CallStub(&stub, left_side, right_side);
3053 result.ToRegister();
3054 __ test(result.reg(), Operand(result.reg()));
3055 result.Unuse();
3056 if (cc == equal) {
3057 dest->Split(cc);
3058 } else {
3059 dest->true_target()->Branch(cc);
3060 dest->false_target()->Jump();
3061
3062        // For performance it is important that this case comes last.
3063 is_smi.Bind(left_side, right_side);
3064 if (IsUnsafeSmi(right_side->handle())) {
3065 right_side->ToRegister();
3066 __ cmp(left_reg, Operand(right_side->reg()));
3067 } else {
3068 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3069 }
3070 left_side->Unuse();
3071 right_side->Unuse();
3072 dest->Split(cc);
3073 }
3074 }
3075 }
3076}
3077
3078
Steve Block6ded16b2010-05-10 14:33:55 +01003079// Check that the comparison operand is a number. If it is not, jump to the
3080// not_numbers jump target, passing the left and right results.
3081static void CheckComparisonOperand(MacroAssembler* masm_,
3082 Result* operand,
3083 Result* left_side,
3084 Result* right_side,
3085 JumpTarget* not_numbers) {
3086 // Perform check if operand is not known to be a number.
3087 if (!operand->type_info().IsNumber()) {
3088 Label done;
3089 __ test(operand->reg(), Immediate(kSmiTagMask));
3090 __ j(zero, &done);
3091 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3092 Immediate(Factory::heap_number_map()));
3093 not_numbers->Branch(not_equal, left_side, right_side, not_taken);
3094 __ bind(&done);
3095 }
3096}
3097
3098
3099// Load a comparison operand onto the FPU stack. This assumes that the
3100// operand has already been checked and is a number.
3101static void LoadComparisonOperand(MacroAssembler* masm_,
3102 Result* operand) {
3103 Label done;
3104 if (operand->type_info().IsDouble()) {
3105 // Operand is known to be a heap number, just load it.
3106 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3107 } else if (operand->type_info().IsSmi()) {
3108 // Operand is known to be a smi. Convert it to double and keep the original
3109 // smi.
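    // (Untag to a machine integer and go through the stack because fild_s
    // takes a memory operand; the tagged value is restored afterwards.)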
3110 __ SmiUntag(operand->reg());
3111 __ push(operand->reg());
3112 __ fild_s(Operand(esp, 0));
3113 __ pop(operand->reg());
3114 __ SmiTag(operand->reg());
3115 } else {
3116    // Operand type not known: check for a smi, otherwise assume a heap number.
3117 Label smi;
3118 __ test(operand->reg(), Immediate(kSmiTagMask));
3119 __ j(zero, &smi);
3120 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3121 __ jmp(&done);
3122 __ bind(&smi);
3123 __ SmiUntag(operand->reg());
3124 __ push(operand->reg());
3125 __ fild_s(Operand(esp, 0));
3126 __ pop(operand->reg());
3127 __ SmiTag(operand->reg());
3128 __ jmp(&done);
3129 }
3130 __ bind(&done);
3131}
3132
3133
3134// Load a comparison operand into an XMM register. Jump to the not_numbers
3135// jump target, passing the left and right results, if it is not a number.
3136static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
3137 Result* operand,
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003138 XMMRegister xmm_reg,
Steve Block6ded16b2010-05-10 14:33:55 +01003139 Result* left_side,
3140 Result* right_side,
3141 JumpTarget* not_numbers) {
3142 Label done;
3143 if (operand->type_info().IsDouble()) {
3144 // Operand is known to be a heap number, just load it.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003145 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003146 } else if (operand->type_info().IsSmi()) {
3147 // Operand is known to be a smi. Convert it to double and keep the original
3148 // smi.
3149 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003150 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003151 __ SmiTag(operand->reg());
3152 } else {
3153 // Operand type not known, check for smi or heap number.
3154 Label smi;
3155 __ test(operand->reg(), Immediate(kSmiTagMask));
3156 __ j(zero, &smi);
3157 if (!operand->type_info().IsNumber()) {
3158 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3159 Immediate(Factory::heap_number_map()));
3160 not_numbers->Branch(not_equal, left_side, right_side, taken);
3161 }
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003162 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003163 __ jmp(&done);
3164
3165 __ bind(&smi);
3166    // Convert the smi to a double and keep the original smi.
3167 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003168 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003169 __ SmiTag(operand->reg());
3170 __ jmp(&done);
3171 }
3172 __ bind(&done);
3173}
3174
3175
3176void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
3177 Result* right_side,
3178 Condition cc,
3179 ControlDestination* dest) {
3180 ASSERT(left_side->is_register());
3181 ASSERT(right_side->is_register());
3182
3183 JumpTarget not_numbers;
3184 if (CpuFeatures::IsSupported(SSE2)) {
3185 CpuFeatures::Scope use_sse2(SSE2);
3186
3187 // Load left and right operand into registers xmm0 and xmm1 and compare.
3188 LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
3189 &not_numbers);
3190 LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
3191 &not_numbers);
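    // ucomisd sets EFLAGS like an unsigned compare and raises the parity
    // flag on an unordered (NaN) result, which is tested below.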
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003192 __ ucomisd(xmm0, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +01003193 } else {
3194 Label check_right, compare;
3195
3196 // Make sure that both comparison operands are numbers.
3197 CheckComparisonOperand(masm_, left_side, left_side, right_side,
3198 &not_numbers);
3199 CheckComparisonOperand(masm_, right_side, left_side, right_side,
3200 &not_numbers);
3201
3202 // Load right and left operand to FPU stack and compare.
3203 LoadComparisonOperand(masm_, right_side);
3204 LoadComparisonOperand(masm_, left_side);
3205 __ FCmp();
3206 }
3207
3208 // Bail out if a NaN is involved.
3209 not_numbers.Branch(parity_even, left_side, right_side, not_taken);
3210
3211 // Split to destination targets based on comparison.
3212 left_side->Unuse();
3213 right_side->Unuse();
3214 dest->true_target()->Branch(DoubleCondition(cc));
3215 dest->false_target()->Jump();
3216
3217 not_numbers.Bind(left_side, right_side);
3218}
3219
3220
Steve Blocka7e24c12009-10-30 11:49:00 +00003221// Call the function just below TOS on the stack with the given
3222// arguments. The receiver is the TOS.
3223void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00003224 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00003225 int position) {
3226 // Push the arguments ("left-to-right") on the stack.
3227 int arg_count = args->length();
3228 for (int i = 0; i < arg_count; i++) {
3229 Load(args->at(i));
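    // (SpillTop keeps the just-loaded argument physically on the stack
    // rather than in a register, as the call stub expects.)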
Leon Clarkef7060e22010-06-03 12:02:55 +01003230 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00003231 }
3232
3233 // Record the position for debugging purposes.
3234 CodeForSourcePosition(position);
3235
3236 // Use the shared code stub to call the function.
3237 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00003238 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003239 Result answer = frame_->CallStub(&call_function, arg_count + 1);
3240 // Restore context and replace function on the stack with the
3241 // result of the stub invocation.
3242 frame_->RestoreContextRegister();
3243 frame_->SetElementAt(0, &answer);
3244}
3245
3246
Leon Clarked91b9f72010-01-27 17:25:45 +00003247void CodeGenerator::CallApplyLazy(Expression* applicand,
Steve Blocka7e24c12009-10-30 11:49:00 +00003248 Expression* receiver,
3249 VariableProxy* arguments,
3250 int position) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003251 // An optimized implementation of expressions of the form
3252 // x.apply(y, arguments).
3253 // If the arguments object of the scope has not been allocated,
3254 // and x.apply is Function.prototype.apply, this optimization
3255 // just copies y and the arguments of the current function on the
3256 // stack, as receiver and arguments, and calls x.
3257 // In the implementation comments, we call x the applicand
3258 // and y the receiver.
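  // A typical example (illustrative): in
  //   function outer() { return inner.apply(this, arguments); }
  // the fast path pushes outer's actual parameters directly and calls
  // 'inner', never allocating an arguments object.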
Steve Blocka7e24c12009-10-30 11:49:00 +00003259 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
3260 ASSERT(arguments->IsArguments());
3261
Leon Clarked91b9f72010-01-27 17:25:45 +00003262 // Load applicand.apply onto the stack. This will usually
Steve Blocka7e24c12009-10-30 11:49:00 +00003263 // give us a megamorphic load site. Not super, but it works.
Leon Clarked91b9f72010-01-27 17:25:45 +00003264 Load(applicand);
Andrei Popescu402d9372010-02-26 13:31:12 +00003265 frame()->Dup();
Leon Clarked91b9f72010-01-27 17:25:45 +00003266 Handle<String> name = Factory::LookupAsciiSymbol("apply");
3267 frame()->Push(name);
3268 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
3269 __ nop();
3270 frame()->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00003271
3272 // Load the receiver and the existing arguments object onto the
3273 // expression stack. Avoid allocating the arguments object here.
3274 Load(receiver);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003275 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00003276
3277 // Emit the source position information after having loaded the
3278 // receiver and the arguments.
3279 CodeForSourcePosition(position);
Leon Clarked91b9f72010-01-27 17:25:45 +00003280 // Contents of frame at this point:
3281 // Frame[0]: arguments object of the current function or the hole.
3282 // Frame[1]: receiver
3283 // Frame[2]: applicand.apply
3284 // Frame[3]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003285
3286 // Check if the arguments object has been lazily allocated
3287 // already. If so, just use that instead of copying the arguments
3288 // from the stack. This also deals with cases where a local variable
3289 // named 'arguments' has been introduced.
3290 frame_->Dup();
3291 Result probe = frame_->Pop();
Leon Clarked91b9f72010-01-27 17:25:45 +00003292 { VirtualFrame::SpilledScope spilled_scope;
3293 Label slow, done;
3294 bool try_lazy = true;
3295 if (probe.is_constant()) {
3296 try_lazy = probe.handle()->IsTheHole();
3297 } else {
3298 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
3299 probe.Unuse();
3300 __ j(not_equal, &slow);
3301 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003302
Leon Clarked91b9f72010-01-27 17:25:45 +00003303 if (try_lazy) {
3304 Label build_args;
3305 // Get rid of the arguments object probe.
3306 frame_->Drop(); // Can be called on a spilled frame.
3307 // Stack now has 3 elements on it.
3308 // Contents of stack at this point:
3309 // esp[0]: receiver
3310 // esp[1]: applicand.apply
3311 // esp[2]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003312
Leon Clarked91b9f72010-01-27 17:25:45 +00003313 // Check that the receiver really is a JavaScript object.
3314 __ mov(eax, Operand(esp, 0));
3315 __ test(eax, Immediate(kSmiTagMask));
3316 __ j(zero, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003317 // We allow all JSObjects including JSFunctions. As long as
3318 // JS_FUNCTION_TYPE is the last instance type and it is right
3319 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
3320 // bound.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003321 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3322 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Leon Clarked91b9f72010-01-27 17:25:45 +00003323 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3324 __ j(below, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003325
Leon Clarked91b9f72010-01-27 17:25:45 +00003326 // Check that applicand.apply is Function.prototype.apply.
3327 __ mov(eax, Operand(esp, kPointerSize));
3328 __ test(eax, Immediate(kSmiTagMask));
3329 __ j(zero, &build_args);
3330 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
3331 __ j(not_equal, &build_args);
Steve Block791712a2010-08-27 10:21:07 +01003332 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
3333 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003334 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Steve Block791712a2010-08-27 10:21:07 +01003335 __ cmp(Operand(ecx), Immediate(apply_code));
Leon Clarked91b9f72010-01-27 17:25:45 +00003336 __ j(not_equal, &build_args);
3337
3338 // Check that applicand is a function.
3339 __ mov(edi, Operand(esp, 2 * kPointerSize));
3340 __ test(edi, Immediate(kSmiTagMask));
3341 __ j(zero, &build_args);
3342 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3343 __ j(not_equal, &build_args);
3344
3345 // Copy the arguments to this function possibly from the
3346 // adaptor frame below it.
3347 Label invoke, adapted;
3348 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3349 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
3350 __ cmp(Operand(ecx),
3351 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3352 __ j(equal, &adapted);
3353
3354 // No arguments adaptor frame. Copy fixed number of arguments.
Andrei Popescu31002712010-02-23 13:46:05 +00003355 __ mov(eax, Immediate(scope()->num_parameters()));
3356 for (int i = 0; i < scope()->num_parameters(); i++) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003357 __ push(frame_->ParameterAt(i));
3358 }
3359 __ jmp(&invoke);
3360
3361 // Arguments adaptor frame present. Copy arguments from there, but
3362      // limit how many are copied to avoid overflowing the stack.
3363 __ bind(&adapted);
3364 static const uint32_t kArgumentsLimit = 1 * KB;
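      // At most kArgumentsLimit (1024) arguments are copied; more than that
      // falls back to the slow case that allocates the arguments object.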
3365 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3366 __ SmiUntag(eax);
3367 __ mov(ecx, Operand(eax));
3368 __ cmp(eax, kArgumentsLimit);
3369 __ j(above, &build_args);
3370
3371 // Loop through the arguments pushing them onto the execution
3372 // stack. We don't inform the virtual frame of the push, so we don't
3373 // have to worry about getting rid of the elements from the virtual
3374 // frame.
3375 Label loop;
3376 // ecx is a small non-negative integer, due to the test above.
3377 __ test(ecx, Operand(ecx));
3378 __ j(zero, &invoke);
3379 __ bind(&loop);
3380 __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
3381 __ dec(ecx);
3382 __ j(not_zero, &loop);
3383
3384 // Invoke the function.
3385 __ bind(&invoke);
3386 ParameterCount actual(eax);
3387 __ InvokeFunction(edi, actual, CALL_FUNCTION);
3388 // Drop applicand.apply and applicand from the stack, and push
3389 // the result of the function call, but leave the spilled frame
3390 // unchanged, with 3 elements, so it is correct when we compile the
3391 // slow-case code.
3392 __ add(Operand(esp), Immediate(2 * kPointerSize));
3393 __ push(eax);
3394 // Stack now has 1 element:
3395 // esp[0]: result
3396 __ jmp(&done);
3397
3398 // Slow-case: Allocate the arguments object since we know it isn't
3399      // there, and fall through to the slow case where we call
3400 // applicand.apply.
3401 __ bind(&build_args);
3402      // Stack now has 3 elements, because we have jumped here from code where:
3403 // esp[0]: receiver
3404 // esp[1]: applicand.apply
3405 // esp[2]: applicand.
3406
3407 // StoreArgumentsObject requires a correct frame, and may modify it.
3408 Result arguments_object = StoreArgumentsObject(false);
3409 frame_->SpillAll();
3410 arguments_object.ToRegister();
3411 frame_->EmitPush(arguments_object.reg());
3412 arguments_object.Unuse();
3413 // Stack and frame now have 4 elements.
3414 __ bind(&slow);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003415 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003416
Leon Clarked91b9f72010-01-27 17:25:45 +00003417 // Generic computation of x.apply(y, args) with no special optimization.
3418 // Flip applicand.apply and applicand on the stack, so
3419 // applicand looks like the receiver of the applicand.apply call.
3420 // Then process it as a normal function call.
3421 __ mov(eax, Operand(esp, 3 * kPointerSize));
3422 __ mov(ebx, Operand(esp, 2 * kPointerSize));
3423 __ mov(Operand(esp, 2 * kPointerSize), eax);
3424 __ mov(Operand(esp, 3 * kPointerSize), ebx);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003425
Leon Clarked91b9f72010-01-27 17:25:45 +00003426 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
3427 Result res = frame_->CallStub(&call_function, 3);
3428 // The function and its two arguments have been dropped.
3429 frame_->Drop(1); // Drop the receiver as well.
3430 res.ToRegister();
3431 frame_->EmitPush(res.reg());
3432 // Stack now has 1 element:
3433 // esp[0]: result
3434 if (try_lazy) __ bind(&done);
3435 } // End of spilled scope.
3436 // Restore the context register after a call.
Steve Blocka7e24c12009-10-30 11:49:00 +00003437 frame_->RestoreContextRegister();
3438}
3439
3440
3441class DeferredStackCheck: public DeferredCode {
3442 public:
3443 DeferredStackCheck() {
3444 set_comment("[ DeferredStackCheck");
3445 }
3446
3447 virtual void Generate();
3448};
3449
3450
3451void DeferredStackCheck::Generate() {
3452 StackCheckStub stub;
3453 __ CallStub(&stub);
3454}
3455
3456
3457void CodeGenerator::CheckStack() {
Steve Blockd0582a62009-12-15 09:54:21 +00003458 DeferredStackCheck* deferred = new DeferredStackCheck;
3459 ExternalReference stack_limit =
3460 ExternalReference::address_of_stack_limit();
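  // Branch to the deferred, out-of-line code only when esp has grown below
  // the limit; DeferredStackCheck (above) then calls the stack-check stub.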
3461 __ cmp(esp, Operand::StaticVariable(stack_limit));
3462 deferred->Branch(below);
3463 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00003464}
3465
3466
3467void CodeGenerator::VisitAndSpill(Statement* statement) {
3468 ASSERT(in_spilled_code());
3469 set_in_spilled_code(false);
3470 Visit(statement);
3471 if (frame_ != NULL) {
3472 frame_->SpillAll();
3473 }
3474 set_in_spilled_code(true);
3475}
3476
3477
3478void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003479#ifdef DEBUG
3480 int original_height = frame_->height();
3481#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003482 ASSERT(in_spilled_code());
3483 set_in_spilled_code(false);
3484 VisitStatements(statements);
3485 if (frame_ != NULL) {
3486 frame_->SpillAll();
3487 }
3488 set_in_spilled_code(true);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003489
3490 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003491}
3492
3493
3494void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003495#ifdef DEBUG
3496 int original_height = frame_->height();
3497#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003498 ASSERT(!in_spilled_code());
3499 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
3500 Visit(statements->at(i));
3501 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003502 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003503}
3504
3505
3506void CodeGenerator::VisitBlock(Block* node) {
3507 ASSERT(!in_spilled_code());
3508 Comment cmnt(masm_, "[ Block");
3509 CodeForStatementPosition(node);
3510 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3511 VisitStatements(node->statements());
3512 if (node->break_target()->is_linked()) {
3513 node->break_target()->Bind();
3514 }
3515 node->break_target()->Unuse();
3516}
3517
3518
3519void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
3520 // Call the runtime to declare the globals. The inevitable call
3521 // will sync frame elements to memory anyway, so we do it eagerly to
3522 // allow us to push the arguments directly into place.
3523 frame_->SyncRange(0, frame_->element_count() - 1);
3524
Steve Block3ce2e202009-11-05 08:53:23 +00003525 frame_->EmitPush(esi); // The context is the first argument.
Steve Blocka7e24c12009-10-30 11:49:00 +00003526 frame_->EmitPush(Immediate(pairs));
Steve Blocka7e24c12009-10-30 11:49:00 +00003527 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
3528 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
3529 // Return value is ignored.
3530}
3531
3532
3533void CodeGenerator::VisitDeclaration(Declaration* node) {
3534 Comment cmnt(masm_, "[ Declaration");
3535 Variable* var = node->proxy()->var();
3536 ASSERT(var != NULL); // must have been resolved
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003537 Slot* slot = var->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00003538
3539 // If it was not possible to allocate the variable at compile time,
3540 // we need to "declare" it at runtime to make sure it actually
3541 // exists in the local context.
3542 if (slot != NULL && slot->type() == Slot::LOOKUP) {
3543 // Variables with a "LOOKUP" slot were introduced as non-locals
3544 // during variable resolution and must have mode DYNAMIC.
3545 ASSERT(var->is_dynamic());
3546 // For now, just do a runtime call. Sync the virtual frame eagerly
3547 // so we can simply push the arguments into place.
3548 frame_->SyncRange(0, frame_->element_count() - 1);
3549 frame_->EmitPush(esi);
3550 frame_->EmitPush(Immediate(var->name()));
3551 // Declaration nodes are always introduced in one of two modes.
3552 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
3553 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
3554 frame_->EmitPush(Immediate(Smi::FromInt(attr)));
3555 // Push initial value, if any.
3556 // Note: For variables we must not push an initial value (such as
3557 // 'undefined') because we may have a (legal) redeclaration and we
3558 // must not destroy the current value.
3559 if (node->mode() == Variable::CONST) {
3560 frame_->EmitPush(Immediate(Factory::the_hole_value()));
3561 } else if (node->fun() != NULL) {
3562 Load(node->fun());
3563 } else {
3564 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
3565 }
3566 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
3567 // Ignore the return value (declarations are statements).
3568 return;
3569 }
3570
3571 ASSERT(!var->is_global());
3572
3573 // If we have a function or a constant, we need to initialize the variable.
3574 Expression* val = NULL;
3575 if (node->mode() == Variable::CONST) {
3576 val = new Literal(Factory::the_hole_value());
3577 } else {
3578 val = node->fun(); // NULL if we don't have a function
3579 }
3580
3581 if (val != NULL) {
3582 {
3583 // Set the initial value.
3584 Reference target(this, node->proxy());
3585 Load(val);
3586 target.SetValue(NOT_CONST_INIT);
3587 // The reference is removed from the stack (preserving TOS) when
3588 // it goes out of scope.
3589 }
3590 // Get rid of the assigned value (declarations are statements).
3591 frame_->Drop();
3592 }
3593}
3594
3595
3596void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
3597 ASSERT(!in_spilled_code());
3598 Comment cmnt(masm_, "[ ExpressionStatement");
3599 CodeForStatementPosition(node);
3600 Expression* expression = node->expression();
3601 expression->MarkAsStatement();
3602 Load(expression);
3603 // Remove the lingering expression result from the top of stack.
3604 frame_->Drop();
3605}
3606
3607
3608void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
3609 ASSERT(!in_spilled_code());
3610 Comment cmnt(masm_, "// EmptyStatement");
3611 CodeForStatementPosition(node);
3612 // nothing to do
3613}
3614
3615
3616void CodeGenerator::VisitIfStatement(IfStatement* node) {
3617 ASSERT(!in_spilled_code());
3618 Comment cmnt(masm_, "[ IfStatement");
3619 // Generate different code depending on which parts of the if statement
3620 // are present or not.
3621 bool has_then_stm = node->HasThenStatement();
3622 bool has_else_stm = node->HasElseStatement();
3623
3624 CodeForStatementPosition(node);
3625 JumpTarget exit;
3626 if (has_then_stm && has_else_stm) {
3627 JumpTarget then;
3628 JumpTarget else_;
3629 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003630 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003631
3632 if (dest.false_was_fall_through()) {
3633 // The else target was bound, so we compile the else part first.
3634 Visit(node->else_statement());
3635
3636 // We may have dangling jumps to the then part.
3637 if (then.is_linked()) {
3638 if (has_valid_frame()) exit.Jump();
3639 then.Bind();
3640 Visit(node->then_statement());
3641 }
3642 } else {
3643 // The then target was bound, so we compile the then part first.
3644 Visit(node->then_statement());
3645
3646 if (else_.is_linked()) {
3647 if (has_valid_frame()) exit.Jump();
3648 else_.Bind();
3649 Visit(node->else_statement());
3650 }
3651 }
3652
3653 } else if (has_then_stm) {
3654 ASSERT(!has_else_stm);
3655 JumpTarget then;
3656 ControlDestination dest(&then, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003657 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003658
3659 if (dest.false_was_fall_through()) {
3660 // The exit label was bound. We may have dangling jumps to the
3661 // then part.
3662 if (then.is_linked()) {
3663 exit.Unuse();
3664 exit.Jump();
3665 then.Bind();
3666 Visit(node->then_statement());
3667 }
3668 } else {
3669 // The then label was bound.
3670 Visit(node->then_statement());
3671 }
3672
3673 } else if (has_else_stm) {
3674 ASSERT(!has_then_stm);
3675 JumpTarget else_;
3676 ControlDestination dest(&exit, &else_, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003677 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003678
3679 if (dest.true_was_fall_through()) {
3680 // The exit label was bound. We may have dangling jumps to the
3681 // else part.
3682 if (else_.is_linked()) {
3683 exit.Unuse();
3684 exit.Jump();
3685 else_.Bind();
3686 Visit(node->else_statement());
3687 }
3688 } else {
3689 // The else label was bound.
3690 Visit(node->else_statement());
3691 }
3692
3693 } else {
3694 ASSERT(!has_then_stm && !has_else_stm);
3695 // We only care about the condition's side effects (not its value
3696 // or control flow effect). LoadCondition is called without
3697 // forcing control flow.
3698 ControlDestination dest(&exit, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003699 LoadCondition(node->condition(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003700 if (!dest.is_used()) {
3701 // We got a value on the frame rather than (or in addition to)
3702 // control flow.
3703 frame_->Drop();
3704 }
3705 }
3706
3707 if (exit.is_linked()) {
3708 exit.Bind();
3709 }
3710}
3711
3712
3713void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
3714 ASSERT(!in_spilled_code());
3715 Comment cmnt(masm_, "[ ContinueStatement");
3716 CodeForStatementPosition(node);
3717 node->target()->continue_target()->Jump();
3718}
3719
3720
3721void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
3722 ASSERT(!in_spilled_code());
3723 Comment cmnt(masm_, "[ BreakStatement");
3724 CodeForStatementPosition(node);
3725 node->target()->break_target()->Jump();
3726}
3727
3728
3729void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
3730 ASSERT(!in_spilled_code());
3731 Comment cmnt(masm_, "[ ReturnStatement");
3732
3733 CodeForStatementPosition(node);
3734 Load(node->expression());
3735 Result return_value = frame_->Pop();
Steve Blockd0582a62009-12-15 09:54:21 +00003736 masm()->WriteRecordedPositions();
Steve Blocka7e24c12009-10-30 11:49:00 +00003737 if (function_return_is_shadowed_) {
3738 function_return_.Jump(&return_value);
3739 } else {
3740 frame_->PrepareForReturn();
3741 if (function_return_.is_bound()) {
3742 // If the function return label is already bound we reuse the
3743 // code by jumping to the return site.
3744 function_return_.Jump(&return_value);
3745 } else {
3746 function_return_.Bind(&return_value);
3747 GenerateReturnSequence(&return_value);
3748 }
3749 }
3750}
3751
3752
3753void CodeGenerator::GenerateReturnSequence(Result* return_value) {
3754 // The return value is a live (but not currently reference counted)
3755 // reference to eax. This is safe because the current frame does not
3756 // contain a reference to eax (it is prepared for the return by spilling
3757 // all registers).
3758 if (FLAG_trace) {
3759 frame_->Push(return_value);
3760 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
3761 }
3762 return_value->ToRegister(eax);
3763
3764 // Add a label for checking the size of the code used for returning.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003765#ifdef DEBUG
Steve Blocka7e24c12009-10-30 11:49:00 +00003766 Label check_exit_codesize;
3767 masm_->bind(&check_exit_codesize);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003768#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003769
3770 // Leave the frame and return popping the arguments and the
3771 // receiver.
3772 frame_->Exit();
Andrei Popescu31002712010-02-23 13:46:05 +00003773 masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00003774 DeleteFrame();
3775
3776#ifdef ENABLE_DEBUGGER_SUPPORT
3777 // Check that the size of the code used for returning matches what is
3778 // expected by the debugger.
Steve Blockd0582a62009-12-15 09:54:21 +00003779 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
Steve Blocka7e24c12009-10-30 11:49:00 +00003780 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
3781#endif
3782}
3783
3784
3785void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
3786 ASSERT(!in_spilled_code());
3787 Comment cmnt(masm_, "[ WithEnterStatement");
3788 CodeForStatementPosition(node);
3789 Load(node->expression());
3790 Result context;
3791 if (node->is_catch_block()) {
3792 context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
3793 } else {
3794 context = frame_->CallRuntime(Runtime::kPushContext, 1);
3795 }
3796
3797 // Update context local.
3798 frame_->SaveContextRegister();
3799
3800 // Verify that the runtime call result and esi agree.
3801 if (FLAG_debug_code) {
3802 __ cmp(context.reg(), Operand(esi));
3803 __ Assert(equal, "Runtime::NewContext should end up in esi");
3804 }
3805}
3806
3807
3808void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
3809 ASSERT(!in_spilled_code());
3810 Comment cmnt(masm_, "[ WithExitStatement");
3811 CodeForStatementPosition(node);
3812 // Pop context.
3813 __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
3814 // Update context local.
3815 frame_->SaveContextRegister();
3816}
3817
3818
3819void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
3820 ASSERT(!in_spilled_code());
3821 Comment cmnt(masm_, "[ SwitchStatement");
3822 CodeForStatementPosition(node);
3823 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3824
3825 // Compile the switch value.
3826 Load(node->tag());
3827
3828 ZoneList<CaseClause*>* cases = node->cases();
3829 int length = cases->length();
3830 CaseClause* default_clause = NULL;
3831
3832 JumpTarget next_test;
3833 // Compile the case label expressions and comparisons. Exit early
3834 // if a comparison is unconditionally true. The target next_test is
3835 // bound before the loop in order to indicate control flow to the
3836 // first comparison.
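  // Each comparison below uses 'equal' with strict semantics against a
  // duplicate of the switch value, matching the '===' used by JS switch.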
3837 next_test.Bind();
3838 for (int i = 0; i < length && !next_test.is_unused(); i++) {
3839 CaseClause* clause = cases->at(i);
3840 // The default is not a test, but remember it for later.
3841 if (clause->is_default()) {
3842 default_clause = clause;
3843 continue;
3844 }
3845
3846 Comment cmnt(masm_, "[ Case comparison");
3847 // We recycle the same target next_test for each test. Bind it if
3848 // the previous test has not done so and then unuse it for the
3849 // loop.
3850 if (next_test.is_linked()) {
3851 next_test.Bind();
3852 }
3853 next_test.Unuse();
3854
3855 // Duplicate the switch value.
3856 frame_->Dup();
3857
3858 // Compile the label expression.
3859 Load(clause->label());
3860
3861 // Compare and branch to the body if true or the next test if
3862 // false. Prefer the next test as a fall through.
3863 ControlDestination dest(clause->body_target(), &next_test, false);
Leon Clarkee46be812010-01-19 14:06:41 +00003864 Comparison(node, equal, true, &dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00003865
3866 // If the comparison fell through to the true target, jump to the
3867 // actual body.
3868 if (dest.true_was_fall_through()) {
3869 clause->body_target()->Unuse();
3870 clause->body_target()->Jump();
3871 }
3872 }
3873
3874 // If there was control flow to a next test from the last one
3875 // compiled, compile a jump to the default or break target.
3876 if (!next_test.is_unused()) {
3877 if (next_test.is_linked()) {
3878 next_test.Bind();
3879 }
3880 // Drop the switch value.
3881 frame_->Drop();
3882 if (default_clause != NULL) {
3883 default_clause->body_target()->Jump();
3884 } else {
3885 node->break_target()->Jump();
3886 }
3887 }
3888
Steve Blocka7e24c12009-10-30 11:49:00 +00003889 // The last instruction emitted was a jump, either to the default
3890 // clause or the break target, or else to a case body from the loop
3891 // that compiles the tests.
3892 ASSERT(!has_valid_frame());
3893 // Compile case bodies as needed.
3894 for (int i = 0; i < length; i++) {
3895 CaseClause* clause = cases->at(i);
3896
3897 // There are two ways to reach the body: from the corresponding
3898 // test or as the fall through of the previous body.
3899 if (clause->body_target()->is_linked() || has_valid_frame()) {
3900 if (clause->body_target()->is_linked()) {
3901 if (has_valid_frame()) {
3902 // If we have both a jump to the test and a fall through, put
3903 // a jump on the fall through path to avoid the dropping of
3904 // the switch value on the test path. The exception is the
3905 // default which has already had the switch value dropped.
3906 if (clause->is_default()) {
3907 clause->body_target()->Bind();
3908 } else {
3909 JumpTarget body;
3910 body.Jump();
3911 clause->body_target()->Bind();
3912 frame_->Drop();
3913 body.Bind();
3914 }
3915 } else {
3916 // No fall through to worry about.
3917 clause->body_target()->Bind();
3918 if (!clause->is_default()) {
3919 frame_->Drop();
3920 }
3921 }
3922 } else {
3923 // Otherwise, we have only fall through.
3924 ASSERT(has_valid_frame());
3925 }
3926
3927 // We are now prepared to compile the body.
3928 Comment cmnt(masm_, "[ Case body");
3929 VisitStatements(clause->statements());
3930 }
3931 clause->body_target()->Unuse();
3932 }
3933
3934 // We may not have a valid frame here so bind the break target only
3935 // if needed.
3936 if (node->break_target()->is_linked()) {
3937 node->break_target()->Bind();
3938 }
3939 node->break_target()->Unuse();
3940}
3941
3942
Steve Block3ce2e202009-11-05 08:53:23 +00003943void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003944 ASSERT(!in_spilled_code());
Steve Block3ce2e202009-11-05 08:53:23 +00003945 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00003946 CodeForStatementPosition(node);
3947 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
Steve Block3ce2e202009-11-05 08:53:23 +00003948 JumpTarget body(JumpTarget::BIDIRECTIONAL);
3949 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00003950
Steve Block3ce2e202009-11-05 08:53:23 +00003951 ConditionAnalysis info = AnalyzeCondition(node->cond());
3952 // Label the top of the loop for the backward jump if necessary.
3953 switch (info) {
3954 case ALWAYS_TRUE:
3955 // Use the continue target.
3956 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3957 node->continue_target()->Bind();
3958 break;
3959 case ALWAYS_FALSE:
3960 // No need to label it.
3961 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3962 break;
3963 case DONT_KNOW:
3964 // Continue is the test, so use the backward body target.
3965 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3966 body.Bind();
3967 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00003968 }
3969
Steve Block3ce2e202009-11-05 08:53:23 +00003970 CheckStack(); // TODO(1222600): ignore if body contains calls.
3971 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00003972
Steve Block3ce2e202009-11-05 08:53:23 +00003973 // Compile the test.
3974 switch (info) {
3975 case ALWAYS_TRUE:
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003976 // If control flow can fall off the end of the body, jump back
3977 // to the top and bind the break target at the exit.
Steve Block3ce2e202009-11-05 08:53:23 +00003978 if (has_valid_frame()) {
3979 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00003980 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003981 if (node->break_target()->is_linked()) {
3982 node->break_target()->Bind();
3983 }
3984 break;
Steve Block3ce2e202009-11-05 08:53:23 +00003985 case ALWAYS_FALSE:
3986 // We may have had continues or breaks in the body.
3987 if (node->continue_target()->is_linked()) {
3988 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003989 }
Steve Block3ce2e202009-11-05 08:53:23 +00003990 if (node->break_target()->is_linked()) {
3991 node->break_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003992 }
Steve Block3ce2e202009-11-05 08:53:23 +00003993 break;
3994 case DONT_KNOW:
3995 // We have to compile the test expression if it can be reached by
3996 // control flow falling out of the body or via continue.
3997 if (node->continue_target()->is_linked()) {
3998 node->continue_target()->Bind();
3999 }
4000 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00004001 Comment cmnt(masm_, "[ DoWhileCondition");
4002 CodeForDoWhileConditionPosition(node);
Steve Block3ce2e202009-11-05 08:53:23 +00004003 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00004004 LoadCondition(node->cond(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00004005 }
Steve Block3ce2e202009-11-05 08:53:23 +00004006 if (node->break_target()->is_linked()) {
4007 node->break_target()->Bind();
4008 }
4009 break;
4010 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004011
Steve Block3ce2e202009-11-05 08:53:23 +00004012 DecrementLoopNesting();
Ben Murdoch3bec4d22010-07-22 14:51:16 +01004013 node->continue_target()->Unuse();
4014 node->break_target()->Unuse();
Steve Block3ce2e202009-11-05 08:53:23 +00004015}
Steve Blocka7e24c12009-10-30 11:49:00 +00004016
Steve Block3ce2e202009-11-05 08:53:23 +00004017
4018void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
4019 ASSERT(!in_spilled_code());
4020 Comment cmnt(masm_, "[ WhileStatement");
4021 CodeForStatementPosition(node);
4022
4023 // If the condition is always false and has no side effects, we do not
4024 // need to compile anything.
4025 ConditionAnalysis info = AnalyzeCondition(node->cond());
4026 if (info == ALWAYS_FALSE) return;
4027
4028 // Do not duplicate conditions that may have function literal
4029 // subexpressions. This can cause us to compile the function literal
4030 // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop with the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is the test at the bottom, no need to label the test
        // at the top. The body is a backward target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else {
        // Label the test at the top as the continue target. The body
        // is a forward-only target.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // The loop body has been labeled with the continue target.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        // If we have chosen to recompile the test at the bottom,
        // then it is the continue target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here and thus an invalid fall-through).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // If we have chosen not to recompile the test at the bottom,
        // jump back to the one at the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may already be bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
  ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
  if (slot->type() == Slot::LOCAL) {
    frame_->SetTypeForLocalAt(slot->index(), info);
  } else {
    frame_->SetTypeForParamAt(slot->index(), info);
  }
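  // Debug-only sanity check: materialize the slot value and verify at run
  // time that it really holds a smi before trusting the recorded type.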
  if (FLAG_debug_code && info.IsSmi()) {
    if (slot->type() == Slot::LOCAL) {
      frame_->PushLocalAt(slot->index());
    } else {
      frame_->PushParameterAt(slot->index());
    }
    Result var = frame_->Pop();
    var.ToRegister();
    __ AbortIfNotSmi(var.reg());
  }
}


void CodeGenerator::VisitForStatement(ForStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);

  // Compile the init expression if present.
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the condition is always false and has no side effects, we do not
  // need to compile anything else.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions. This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();

  // Target for backward edge if no test at the bottom, otherwise
  // unused.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);

  // Target for backward edge if there is a test at the bottom,
  // otherwise used as target for test at the top.
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop.
      if (node->next() == NULL) {
        // Use the continue target if there is no update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // Otherwise use the backward loop target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is either the update expression or the test at the
        // bottom, no need to label the test at the top.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else if (node->next() == NULL) {
        // We are not recompiling the test at the bottom and there is no
        // update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // We are not recompiling the test at the bottom and there is an
        // update expression.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }

      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition. In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
  if (node->is_fast_smi_loop()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
  }
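  // (Illustrative: a loop like "for (var i = 0; i < 100; i++) { use(i); }"
  // qualifies, since i is not modified in the body and is checked against
  // a constant limit, so the loop variable can be typed as a smi here.)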

  Visit(node->body());

  // If there is an update expression, compile it if necessary.
  if (node->next() != NULL) {
    if (node->continue_target()->is_linked()) {
      node->continue_target()->Bind();
    }

    // Control can reach the update by falling out of the body or by a
    // continue.
    if (has_valid_frame()) {
      // Record the source position of the statement; this code, which
      // comes after the code for the body, belongs to the loop statement
      // itself and not to the body.
      CodeForStatementPosition(node);
      Visit(node->next());
    }
  }

  // Set the type of the loop variable to smi before compiling the test
  // expression if we are in a fast smi loop condition.
  if (node->is_fast_smi_loop() && has_valid_frame()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
  }

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      if (has_valid_frame()) {
        if (node->next() == NULL) {
          node->continue_target()->Jump();
        } else {
          loop.Jump();
        }
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        if (node->continue_target()->is_linked()) {
          // We can have dangling jumps to the continue target if there
          // was no update expression.
          node->continue_target()->Bind();
        }
        // Control can reach the test at the bottom by falling out of
        // the body, by a continue in the body, or from the update
        // expression.
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // Otherwise, jump back to the test at the top.
        if (has_valid_frame()) {
          if (node->next() == NULL) {
            node->continue_target()->Jump();
          } else {
            loop.Jump();
          }
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may already be bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive;
  JumpTarget jsobject;
  JumpTarget fixed_array;
  JumpTarget entry(JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check;
  JumpTarget exit;

  // Get the object to enumerate over (converted to JSObject).
  LoadAndSpill(node->enumerable());

  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification: section 12.6.4 mandates a call to ToObject.
4353 frame_->EmitPop(eax);
4354
4355 // eax: value to be iterated over
4356 __ cmp(eax, Factory::undefined_value());
4357 exit.Branch(equal);
4358 __ cmp(eax, Factory::null_value());
4359 exit.Branch(equal);
4360
4361 // Stack layout in body:
4362 // [iteration counter (smi)] <- slot 0
4363 // [length of array] <- slot 1
4364 // [FixedArray] <- slot 2
4365 // [Map or 0] <- slot 3
4366 // [Object] <- slot 4
4367
4368 // Check if enumerable is already a JSObject
4369 // eax: value to be iterated over
4370 __ test(eax, Immediate(kSmiTagMask));
4371 primitive.Branch(zero);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004372 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00004373 jsobject.Branch(above_equal);
4374
4375 primitive.Bind();
4376 frame_->EmitPush(eax);
4377 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
  // The call returns the value in eax, which is where we want it below.

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // eax: value to be iterated over
  frame_->EmitPush(eax);  // Push the object being iterated over.

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
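  // (In outline: walk the object and every prototype, requiring no
  // elements, a non-empty descriptor array with an enum cache, and empty
  // enum caches on the prototypes; any failure falls back to the runtime.)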
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(ecx, eax);
  loop.Bind();
  // Check that there are no elements.
  __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache. Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
  call_runtime.Branch(equal);
  // Check that there is an enum cache in the non-empty instance
  // descriptors. This is the case if the next enumeration index
  // field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  call_runtime.Branch(zero);
  // For all objects but the receiver, check that the cache is empty.
  __ cmp(ecx, Operand(eax));
  check_prototype.Branch(equal);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  loop.Branch(not_equal);
  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(eax);  // push the Object (slot 4) for the runtime call
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // eax: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(edx, Operand(eax));
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, Factory::meta_map());
  fixed_array.Branch(not_equal);

  use_cache.Bind();
  // Get enum cache
  // eax: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(ecx, Operand(eax));

  __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
  // Get the bridge array held in the enumeration index field.
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  // Get the cache from the bridge array.
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(eax);  // <- slot 3
  frame_->EmitPush(edx);  // <- slot 2
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
  entry.Jump();

  fixed_array.Bind();
  // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 3
  frame_->EmitPush(eax);  // <- slot 2

  // Push the length of the array and the initial index onto the stack.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0

  // Condition.
  entry.Bind();
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);

  __ mov(eax, frame_->ElementAt(0));  // load the current count
  __ cmp(eax, frame_->ElementAt(1));  // compare to the array length
  node->break_target()->Branch(above_equal);

  // Get the i'th entry of the array.
  __ mov(edx, frame_->ElementAt(2));
  __ mov(ebx, FixedArrayElementOperand(edx, eax));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  __ mov(edx, frame_->ElementAt(3));
  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  // edx: expected map value
  __ mov(ecx, frame_->ElementAt(4));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, Operand(edx));
  end_del_check.Branch(equal);

  // Convert the entry to a string (or null if it isn't a property anymore).
  frame_->EmitPush(frame_->ElementAt(4));  // push enumerable
  frame_->EmitPush(ebx);  // push entry
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
  __ mov(ebx, Operand(eax));

  // If the property has been removed while iterating, we just skip it.
  __ test(ebx, Operand(ebx));
  node->continue_target()->Branch(equal);

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop. ebx: i'th entry of the enum cache (or string thereof).
  frame_->EmitPush(ebx);
  { Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        // Loading a reference may leave the frame in an unspilled state.
        frame_->SpillAll();
        // Get the value (under the reference on the stack) from memory.
        frame_->EmitPush(frame_->ElementAt(each.size()));
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop(2);
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (e.g., ebx pushed above) is
        // right on top of or right underneath a zero-sized reference.
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop();
      }
    }
  }
  // Unloading a reference may leave the frame in an unspilled state.
  frame_->SpillAll();

  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  VisitAndSpill(node->body());

  // Next. Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
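  // Increment the iteration counter. Both operands are tagged smis, so
  // the addition yields the correct smi result without untagging.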
  frame_->EmitPop(eax);
  __ add(Operand(eax), Immediate(Smi::FromInt(1)));
  frame_->EmitPush(eax);
  entry.Jump();

  // Cleanup. No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();

  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}


void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(eax);

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
  StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  VisitStatementsAndSpill(node->catch_block()->statements());
  if (has_valid_frame()) {
    exit.Jump();
  }


  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
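  // (For instance, a 'return' inside the try block must not jump straight
  // to the function return: the try handler has to be unlinked first, so
  // the return is routed through a shadow target and fixed up below.)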
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // Make sure that there's nothing left on the stack above the
  // handler structure.
  if (FLAG_debug_code) {
    __ mov(eax, Operand::StaticVariable(handler_address));
    __ cmp(esp, Operand(eax));
    __ Assert(equal, "stack pointer should point to top handler");
  }

  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame. Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing targets that
  // have been jumped to. Deallocate each shadow target.
  Result return_value;
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain; be careful not to destroy the TOS if
      // there is one.
      if (i == kReturnShadowIndex) {
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (e.g., for...in) may have left stuff on the stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        if (!function_return_is_shadowed_) frame_->PrepareForReturn();
        shadows[i]->other_target()->Jump(&return_value);
      } else {
        shadows[i]->other_target()->Jump();
      }
    }
  }

  exit.Bind();
}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
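  // Values JUMPING + i additionally encode which shadow target i was
  // taken; the dispatch at the end of the finally block compares the state
  // against Smi::FromInt(JUMPING + i) to pick the right destination.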

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(eax);
  // In case of thrown exceptions, this is where we continue.
  __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in ecx, then jump around the unlink blocks if any.
    frame_->EmitPush(Immediate(Factory::undefined_value()));
    __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // on the virtual frame. We must preserve it until it is
      // pushed.
      if (i == kReturnShadowIndex) {
        Result return_value;
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (e.g., for...in) may have left stuff on the
      // stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.
      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this target shadowed the function return, materialize
        // the return value on the stack.
        frame_->EmitPush(eax);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        frame_->EmitPush(Immediate(Factory::undefined_value()));
      }
      __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(ecx);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatementsAndSpill(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(ecx);
    frame_->EmitPop(eax);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets. Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      BreakTarget* original = shadows[i]->other_target();
      __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
      if (i == kReturnShadowIndex) {
        // The return value is (already) in eax.
        Result return_value = allocator_->Allocate(eax);
        ASSERT(return_value.is_valid());
        if (function_return_is_shadowed_) {
          original->Branch(equal, &return_value);
        } else {
          // Branch around the preparation for return which may emit
          // code.
          JumpTarget skip;
          skip.Branch(not_equal);
          frame_->PrepareForReturn();
          original->Jump(&return_value);
          skip.Bind();
        }
      } else {
        original->Branch(equal);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
    exit.Branch(not_equal);

    // Rethrow exception.
    frame_->EmitPush(eax);  // undo pop from above
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Spill everything, even constants, to the frame.
  frame_->SpillAll();

  frame_->DebugBreak();
  // Ignore the return value.
#endif
}


Result CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info) {
  // The inevitable call will sync frame elements to memory anyway, so
  // we do it eagerly to allow us to push the arguments directly into
  // place.
  frame()->SyncRange(0, frame()->element_count() - 1);

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
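  // (Illustrative: a nested function whose body contains no object, array,
  // or regexp literals typically has num_literals() == 0 and so takes the
  // stub path below.)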
  if (scope()->is_function_scope() && function_info->num_literals() == 0) {
    FastNewClosureStub stub;
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallStub(&stub, 1);
  } else {
    // Call the runtime to instantiate the function based on the
    // shared function info.
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallRuntime(Runtime::kNewClosure, 2);
  }
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  Comment cmnt(masm_, "[ FunctionLiteral");
  ASSERT(!in_safe_int32_mode());
  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script());
  // Check for stack-overflow exception.
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  Result result = InstantiateFunction(function_info);
  frame()->Push(&result);
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  Result result = InstantiateFunction(node->shared_function_info());
  frame()->Push(&result);
}


void CodeGenerator::VisitConditional(Conditional* node) {
  Comment cmnt(masm_, "[ Conditional");
  ASSERT(!in_safe_int32_mode());
  JumpTarget then;
  JumpTarget else_;
  JumpTarget exit;
  ControlDestination dest(&then, &else_, true);
  LoadCondition(node->condition(), &dest, true);

  if (dest.false_was_fall_through()) {
    // The else target was bound, so we compile the else part first.
    Load(node->else_expression());

    if (then.is_linked()) {
      exit.Jump();
      then.Bind();
      Load(node->then_expression());
    }
  } else {
    // The then target was bound, so we compile the then part first.
    Load(node->then_expression());

    if (else_.is_linked()) {
      exit.Jump();
      else_.Bind();
      Load(node->else_expression());
    }
  }

  exit.Bind();
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());
    JumpTarget slow;
    JumpTarget done;
    Result value;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &value,
                                    &slow,
                                    &done);

    slow.Bind();
    // A runtime call is inevitable. We eagerly sync frame elements
    // to memory so that we can push the arguments directly into place
    // on top of the frame.
    frame()->SyncRange(0, frame()->element_count() - 1);
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(slot->var()->name()));
    if (typeof_state == INSIDE_TYPEOF) {
      value =
          frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      value = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind(&value);
    frame_->Push(&value);

  } else if (slot->var()->mode() == Variable::CONST) {
    // Const slots may contain 'the hole' value (the constant hasn't been
    // initialized yet) which needs to be converted into the 'undefined'
    // value.
    //
    // We currently spill the virtual frame because constants use the
    // potentially unsafe direct-frame access of SlotOperand.
    VirtualFrame::SpilledScope spilled_scope;
    Comment cmnt(masm_, "[ Load const");
    Label exit;
    __ mov(ecx, SlotOperand(slot, ecx));
    __ cmp(ecx, Factory::the_hole_value());
    __ j(not_equal, &exit);
    __ mov(ecx, Factory::undefined_value());
    __ bind(&exit);
    frame()->EmitPush(ecx);

  } else if (slot->type() == Slot::PARAMETER) {
    frame()->PushParameterAt(slot->index());

  } else if (slot->type() == Slot::LOCAL) {
    frame()->PushLocalAt(slot->index());

  } else {
    // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
    // here.
    //
    // The use of SlotOperand below is safe for an unspilled frame
    // because it will always be a context slot.
    ASSERT(slot->type() == Slot::CONTEXT);
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
    frame()->Push(&temp);
  }
}


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // If the loaded value is a constant, we know if the arguments
  // object has been lazily loaded yet.
  Result result = frame()->Pop();
  if (result.is_constant()) {
    if (result.handle()->IsTheHole()) {
      result = StoreArgumentsObject(false);
    }
    frame()->Push(&result);
    return;
  }
  ASSERT(result.is_register());
  // The loaded value is in a register. If it is the sentinel that
  // indicates that we haven't loaded the arguments object yet, we
  // need to do it now.
  JumpTarget exit;
  __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
  frame()->Push(&result);
  exit.Branch(not_equal);

  result = StoreArgumentsObject(false);
  frame()->SetElementAt(0, &result);
  result.Unuse();
  exit.Bind();
  return;
}


Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    JumpTarget* slow) {
  ASSERT(!in_safe_int32_mode());
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
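  // (Illustrative: an eval in an enclosing scope, e.g. eval("var x = 1"),
  // installs a context extension object that could shadow the global
  // variable, so the fast path must bail out to the slow case.)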
  Register context = esi;
  Result tmp = allocator_->Allocate();
  ASSERT(tmp.is_valid());  // All non-reserved registers were available.

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      // Load next context in chain.
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(tmp.reg())) {
      __ mov(tmp.reg(), context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
    slow->Branch(not_equal, not_taken);
    // Load next context in chain.
    __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
    __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }
  tmp.Unuse();

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  // The register allocator prefers eax if it is free, so the code generator
  // will load the global object directly into eax, which is where the LoadIC
  // expects it.
  frame_->Spill(eax);
  LoadGlobal();
  frame_->Push(slot->var()->name());
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
                         ? RelocInfo::CODE_TARGET
                         : RelocInfo::CODE_TARGET_CONTEXT;
  Result answer = frame_->CallLoadIC(mode);
  // A test eax instruction following the call signals that the inobject
  // property case was inlined. Ensure that there is not a test eax
  // instruction here.
  __ nop();
  return answer;
}


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    Result* result,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    done->Jump(result);

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      // Allocate a fresh register to use as a temp in
      // ContextSlotOperandCheckExtensions and to hold the result
      // value.
      *result = allocator()->Allocate();
      ASSERT(result->is_valid());
      __ mov(result->reg(),
             ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ cmp(result->reg(), Factory::the_hole_value());
        done->Branch(not_equal, result);
        __ mov(result->reg(), Factory::undefined_value());
      }
      done->Jump(result);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          Result arguments = allocator()->Allocate();
          ASSERT(arguments.is_valid());
          __ mov(arguments.reg(),
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                   arguments,
                                                   slow));
          frame_->Push(&arguments);
          frame_->Push(key_literal->handle());
          *result = EmitKeyedLoad();
          done->Jump(result);
        }
      }
    }
  }
}


void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call. Since the call is inevitable,
    // we eagerly sync the virtual frame so we can directly push the
    // arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);

    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(slot->var()->name()));

    Result value;
    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores attribute
      // (e.g. READ_ONLY) of context slot so that we can initialize const
      // properties (introduced via eval("const foo = (some expr);")). Also,
      // uses the current function context instead of the top context.
      //
      // Note that we must declare the foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the same
      // time, because the const declaration may be at the end of the eval
      // code (sigh...) and the const variable may have been used before
      // (where its value is 'undefined'). Thus, we can only do the
      // initialization when we actually encounter the expression and when
      // the expression operands are defined and valid, and thus we need the
      // split into 2 operations: declaration of the context slot followed
      // by initialization.
      value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack. This is necessary for compiling chained assignment
    // expressions.
    frame_->Push(&value);

  } else {
    ASSERT(!slot->var()->is_dynamic());

    JumpTarget exit;
    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value). When the assignment is executed,
      // the code is identical to a normal store (see below).
      //
      // We spill the frame in the code below because the direct-frame
      // access of SlotOperand is potentially unsafe with an unspilled
      // frame.
      VirtualFrame::SpilledScope spilled_scope;
      Comment cmnt(masm_, "[ Init const");
      __ mov(ecx, SlotOperand(slot, ecx));
      __ cmp(ecx, Factory::the_hole_value());
      exit.Branch(not_equal);
    }

    // We must execute the store. Storing a variable must keep the (new)
    // value on the stack. This is necessary for compiling assignment
    // expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will initialize
    // consts to 'the hole' value and by doing so, end up calling this code.
    if (slot->type() == Slot::PARAMETER) {
      frame_->StoreToParameterAt(slot->index());
    } else if (slot->type() == Slot::LOCAL) {
      frame_->StoreToLocalAt(slot->index());
    } else {
      // The other slot types (LOOKUP and GLOBAL) cannot reach here.
      //
      // The use of SlotOperand below is safe for an unspilled frame
      // because the slot is a context slot.
      ASSERT(slot->type() == Slot::CONTEXT);
      frame_->Dup();
      Result value = frame_->Pop();
      value.ToRegister();
      Result start = allocator_->Allocate();
      ASSERT(start.is_valid());
      __ mov(SlotOperand(slot, start.reg()), value.reg());
      // RecordWrite may destroy the value registers.
      //
      // TODO(204): Avoid actually spilling when the value is not
      // needed (probably the common case).
      frame_->Spill(value.reg());
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
      // The results start, value, and temp are unused by going out of
      // scope.
    }

    exit.Bind();
  }
}


void CodeGenerator::VisitSlot(Slot* slot) {
  Comment cmnt(masm_, "[ Slot");
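  // (Context: in safe int32 mode the code generator works on untagged
  // 32-bit values, so locals and parameters are pushed without their smi
  // tags; slot types the mode does not handle cannot appear here.)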
  if (in_safe_int32_mode()) {
    if ((slot->type() == Slot::LOCAL && !slot->is_arguments())) {
      frame()->UntaggedPushLocalAt(slot->index());
    } else if (slot->type() == Slot::PARAMETER) {
      frame()->UntaggedPushParameterAt(slot->index());
    } else {
      UNREACHABLE();
    }
  } else {
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
  }
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
  Comment cmnt(masm_, "[ VariableProxy");
  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    ASSERT(!in_safe_int32_mode());
    Reference ref(this, node);
    ref.GetValue();
  }
}


void CodeGenerator::VisitLiteral(Literal* node) {
  Comment cmnt(masm_, "[ Literal");
  if (in_safe_int32_mode()) {
    frame_->PushUntaggedElement(node->handle());
  } else {
    frame_->Push(node->handle());
  }
}


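// The Push/Store/MoveUnsafeSmi helpers below write a wide smi immediate in
// two 16-bit halves rather than as one 32-bit immediate. This is presumably
// so that a program-chosen 32-bit constant never appears verbatim in the
// generated code stream (a JIT-spraying mitigation); IsUnsafeSmi decides
// when a smi is too wide to inline whole.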
void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ push(Immediate(bits & 0x0000FFFF));
  __ or_(Operand(esp, 0), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ mov(Operand(ebp, offset), Immediate(bits & 0x0000FFFF));
  __ or_(Operand(ebp, offset), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
  ASSERT(target.is_valid());
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ Set(target, Immediate(bits & 0x0000FFFF));
  __ or_(target, bits & 0xFFFF0000);
}


bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
  if (!value->IsSmi()) return false;
  int int_value = Smi::cast(*value)->value();
  return !is_intn(int_value, kMaxSmiInlinedBits);
}

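// DeferredCode is the classic codegen's slow-path mechanism: the fast
// path branches to a deferred object's entry label when a check fails,
// the Generate() body is emitted out of line after the main code, and
// control returns to the point bound by BindExit(). This keeps rarely
// executed code off the hot instruction path.
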
// Materialize the regexp literal 'node' in the literals array
// 'literals' of the function. Leave the regexp boilerplate in
// 'boilerplate'.
class DeferredRegExpLiteral: public DeferredCode {
 public:
  DeferredRegExpLiteral(Register boilerplate,
                        Register literals,
                        RegExpLiteral* node)
      : boilerplate_(boilerplate), literals_(literals), node_(node) {
    set_comment("[ DeferredRegExpLiteral");
  }

  void Generate();

 private:
  Register boilerplate_;
  Register literals_;
  RegExpLiteral* node_;
};


void DeferredRegExpLiteral::Generate() {
  // Since the entry is undefined we call the runtime system to
  // compute the literal.
  // Literal array (0).
  __ push(literals_);
  // Literal index (1).
  __ push(Immediate(Smi::FromInt(node_->literal_index())));
  // RegExp pattern (2).
  __ push(Immediate(node_->pattern()));
  // RegExp flags (3).
  __ push(Immediate(node_->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
}

class DeferredAllocateInNewSpace: public DeferredCode {
 public:
  DeferredAllocateInNewSpace(int size,
                             Register target,
                             int registers_to_save = 0)
      : size_(size), target_(target), registers_to_save_(registers_to_save) {
    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
    ASSERT_EQ(0, registers_to_save & target.bit());
    set_comment("[ DeferredAllocateInNewSpace");
  }
  void Generate();

 private:
  int size_;
  Register target_;
  int registers_to_save_;
};

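// The registers named in registers_to_save_ are live across the
// allocation; they are pushed in ascending register-code order before
// the runtime call and popped in the reverse order afterwards.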
void DeferredAllocateInNewSpace::Generate() {
  for (int i = 0; i < kNumRegs; i++) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ push(save_register);
    }
  }
  __ push(Immediate(Smi::FromInt(size_)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  if (!target_.is(eax)) {
    __ mov(target_, eax);
  }
  for (int i = kNumRegs - 1; i >= 0; i--) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ pop(save_register);
    }
  }
}

void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literals array and check the allocated entry. Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the index saved in the AST node.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the RegExp object. If so,
  // jump to the deferred code passing the literals array.
  DeferredRegExpLiteral* deferred =
      new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
  __ cmp(boilerplate.reg(), Factory::undefined_value());
  deferred->Branch(equal);
  deferred->BindExit();

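  // Unlike other literals the boilerplate is not returned directly:
  // each evaluation of a regexp literal has to produce a fresh object
  // (so that e.g. lastIndex is not shared between evaluations), so the
  // boilerplate is shallow-copied into a newly allocated clone below.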
  // The boilerplate register now contains the RegExp object.

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;

  DeferredAllocateInNewSpace* allocate_fallback =
      new DeferredAllocateInNewSpace(size, literals.reg());
  frame_->Push(&boilerplate);
  frame_->SpillTop();
  __ AllocateInNewSpace(size,
                        literals.reg(),
                        tmp.reg(),
                        no_reg,
                        allocate_fallback->entry_label(),
                        TAG_OBJECT);
  allocate_fallback->BindExit();
  boilerplate = frame_->Pop();

  // Copy from the boilerplate to the clone and return the clone.
  for (int i = 0; i < size; i += kPointerSize) {
    __ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
    __ mov(FieldOperand(literals.reg(), i), tmp.reg());
  }
  frame_->Push(&literals);
}

void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ObjectLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant properties.
  frame_->Push(node->constant_properties());
  // Should the object literal have fast elements?
  frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->Push(&clone);

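  // The runtime call has already installed all compile-time constant
  // properties from the boilerplate; the loop below only emits stores
  // for properties whose values must be computed at run time, plus
  // accessors and __proto__ assignments.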
  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        Handle<Object> key(property->key()->handle());
        if (key->IsSymbol()) {
          // Duplicate the object as the IC receiver.
          frame_->Dup();
          Load(property->value());
          Result ignored =
              frame_->CallStoreIC(Handle<String>::cast(key), false);
          // A test eax instruction following the store IC call would
          // indicate the presence of an inlined version of the
          // store. Add a nop to indicate that there is no such
          // inlined version.
          __ nop();
          break;
        }
        // Fall through.
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}

void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ArrayLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  frame_->Push(&literals);
  frame_->Push(Smi::FromInt(node->literal_index()));
  frame_->Push(node->constant_elements());
  int length = node->values()->length();
  Result clone;
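  // Constant elements backed by the copy-on-write array map can be
  // shared with the boilerplate: the stub hands out the same elements
  // store, and the first write to the array copies it.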
  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
  } else if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
  }
  frame_->Push(&clone);

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < length; i++) {
    Expression* value = node->values()->at(i);

    if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) {
      continue;
    }

    // The property must be set by generated code.
    Load(value);

    // Get the property value off the stack.
    Result prop_value = frame_->Pop();
    prop_value.ToRegister();

    // Fetch the array literal while leaving a copy on the stack and
    // use it to get the elements array.
    frame_->Dup();
    Result elements = frame_->Pop();
    elements.ToRegister();
    frame_->Spill(elements.reg());
    // Get the elements array.
    __ mov(elements.reg(),
           FieldOperand(elements.reg(), JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());

    // Update the write barrier for the array address.
    frame_->Spill(prop_value.reg());  // Overwritten by the write barrier.
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_valid());
    __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
  }
}

void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
  ASSERT(!in_safe_int32_mode());
  ASSERT(!in_spilled_code());
  // Call the runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  Result result =
      frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->Push(&result);
}

void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->AsSlot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current slot value and the actual right-hand side.
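    // (E.g. for 'x += y' this loads x, loads y, and emits the addition;
    // the store below then writes the result back into x's slot.)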
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    // Construct the implicit binary operation.
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT(frame()->height() == original_height + 1);
}

void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary.
  // If the receiver is trivial it is not placed on the stack at this point,
  // but loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
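  // (An initialization block is a run of consecutive assignments to
  // properties of the same object, e.g. obj.a = 1; obj.b = 2; obj.c = 3;
  // the parser marks the first and last assignment of each such run.)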
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so this is never an assignment to a variable and
    // there must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      frame()->Dup();
    }
    Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare
  // for that add an extra copy of the receiver to the frame, so that it can
  // be converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame()->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else if (var != NULL) {
      // The LoadIC stub expects the object in eax.
      // Freeing eax causes the code generator to load the global into it.
      frame_->Spill(eax);
      LoadGlobal();
    } else {
      frame()->Dup();
    }
    Result value = EmitNamedLoad(name, var != NULL);
    frame()->Push(&value);
    Load(node->value());

    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    // Construct the implicit binary operation.
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    Result value = frame()->Pop();
    frame()->Push(prop->obj());
    frame()->Push(&value);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  Result answer = EmitNamedStore(name, is_contextual);
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Result result = frame()->Pop();
      Result receiver = frame()->Pop();
      frame()->Push(&result);
      frame()->Push(&receiver);
    }
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(frame()->height(), original_height + 1);
}

void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare
  // for that add an extra copy of the receiver to the frame, so that it can
  // be converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame()->PushElementAt(1);
    frame()->PushElementAt(1);
    Result value = EmitKeyedLoad();
    frame()->Push(&value);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    BinaryOperation expr(node);
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  Result answer = EmitKeyedStore(prop->key()->type());
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Result result = frame()->Pop();
    Result receiver = frame()->Pop();
    frame()->Push(&result);
    frame()->Push(&receiver);
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT(frame()->height() == original_height + 1);
}

void CodeGenerator::VisitAssignment(Assignment* node) {
  ASSERT(!in_safe_int32_mode());
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame()->Push(&result);
  }

  ASSERT(frame()->height() == original_height + 1);
}

void CodeGenerator::VisitThrow(Throw* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Throw");
  Load(node->exception());
  Result result = frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->Push(&result);
}


void CodeGenerator::VisitProperty(Property* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Property");
  Reference property(this, node);
  property.GetValue();
}

void CodeGenerator::VisitCall(Call* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.

    // Prepare the stack for the call to the resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->Push(Factory::undefined_value());

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Result to hold the result of the function resolution and the
    // final result of the eval call.
    Result result;

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code. If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
                                                     NOT_INSIDE_TYPEOF,
                                                     &slow);
      frame_->Push(&fun);
      if (arg_count > 0) {
        frame_->PushElementAt(arg_count);
      } else {
        frame_->Push(Factory::undefined_value());
      }
      frame_->PushParameterAt(-1);

      // Resolve the call.
      result =
          frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);

      done.Jump(&result);
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    frame_->PushElementAt(arg_count + 1);
    if (arg_count > 0) {
      frame_->PushElementAt(arg_count);
    } else {
      frame_->Push(Factory::undefined_value());
    }
    frame_->PushParameterAt(-1);

    // Resolve the call.
    result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind(&result);

    // The runtime call returns a pair of values in eax (function) and
    // edx (receiver). Touch up the stack with the right values.
    Result receiver = allocator_->Allocate(edx);
    frame_->SetElementAt(arg_count + 1, &result);
    frame_->SetElementAt(arg_count, &receiver);
    receiver.Unuse();

    // Call the function.
    CodeForSourcePosition(node->position());
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    result = frame_->CallStub(&call_function, arg_count + 1);

    // Restore the context and overwrite the function on the stack with
    // the result.
    frame_->RestoreContextRegister();
    frame_->SetElementAt(0, &result);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------

    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Push the name of the function onto the frame.
    frame_->Push(var->name());

    // Call the IC initialization code.
    CodeForSourcePosition(node->position());
    Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
                                       arg_count,
                                       loop_nesting());
    frame_->RestoreContextRegister();
    frame_->Push(&result);

  } else if (var != NULL && var->AsSlot() != NULL &&
             var->AsSlot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;
    Result function;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
                                    NOT_INSIDE_TYPEOF,
                                    &function,
                                    &slow,
                                    &done);

    slow.Bind();
    // Enter the runtime system to load the function from the context.
    // Sync the frame so we can push the arguments directly into
    // place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // The runtime call returns a pair of values in eax and edx. The
    // looked-up function is in eax and the receiver is in edx. These
    // register references are not ref counted here. We spill them
    // eagerly since they are arguments to an inevitable call (and are
    // not sharable by the arguments).
    ASSERT(!allocator()->is_used(eax));
    frame_->EmitPush(eax);

    // Load the receiver.
    ASSERT(!allocator()->is_used(edx));
    frame_->EmitPush(edx);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind(&function);
      frame_->Push(&function);
      LoadGlobalReceiver();
      call.Bind();
    }

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
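        // (This matches only calls written literally as
        // f.apply(receiver, arguments), in a function whose arguments
        // object has not been forced into existence.)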
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Push the name of the function onto the frame.
        frame_->Push(name);

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
                               loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the function to call from the property through a reference.

      // Pass receiver to called function.
      if (property->is_synthetic()) {
        Reference ref(this, property);
        ref.GetValue();
        // Use global object as receiver.
        LoadGlobalReceiver();
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Load the name of the function.
        Load(property->key());

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET,
                                    arg_count,
                                    loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver();

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
  }
}

void CodeGenerator::VisitCallNew(CallNew* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  Load(node->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Result result = frame_->CallConstructor(arg_count);
  frame_->Push(&result);
}

void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
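  // Smis carry a 0 in the low (tag) bit, so testing against kSmiTagMask
  // sets the zero flag exactly for smis; Split(zero) then routes control
  // to the true target for smis and to the false target otherwise.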
  __ test(value.reg(), Immediate(kSmiTagMask));
  value.Unuse();
  destination()->Split(zero);
}

void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  frame_->Push(Factory::undefined_value());
}

void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
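  // Test the smi tag bit and the sign bit together: both must be clear
  // for a non-negative smi, so one 'test' instruction covers both checks.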
  __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
  value.Unuse();
  destination()->Split(zero);
}

class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ Set(result_, Immediate(Factory::undefined_value()));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ Set(result_, Immediate(Factory::nan_value()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};

// This generates code that performs a String.prototype.charCodeAt() call
// in the fast case; the deferred code returns undefined when the receiver
// or index needs conversion (signalling the calling builtin to take the
// general path) and NaN when the index is out of range.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need two extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch = allocator()->Allocate();
  ASSERT(scratch.is_valid());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object.reg(),
                                   index.reg(),
                                   scratch.reg(),
                                   result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}

class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Result code = frame_->Pop();
  code.ToRegister();
  ASSERT(code.is_valid());

  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code.reg(), result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}

class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ Set(result_, Immediate(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ Set(result_, Immediate(Factory::empty_string()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need three extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch1 = allocator()->Allocate();
  ASSERT(scratch1.is_valid());
  Result scratch2 = allocator()->Allocate();
  ASSERT(scratch2.is_valid());

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object.reg(),
                               index.reg(),
                               scratch1.reg(),
                               scratch2.reg(),
                               result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}

void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a JS array or not.
  __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a regexp.
  __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}

void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();

  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  __ cmp(obj.reg(), Factory::null_value());
  destination()->true_target()->Branch(equal);

  Result map = allocator()->Allocate();
  ASSERT(map.is_valid());
  __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  destination()->false_target()->Branch(not_zero);
  // Do a range test for JSObject type. We can't use
  // MacroAssembler::IsInstanceJSObjectType, because we are using a
  // ControlDestination, so we copy its implementation here.
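  // The range test uses the unsigned-compare trick: after subtracting
  // FIRST_JS_OBJECT_TYPE, every instance type in the range
  // [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE] lands in
  // [0, LAST - FIRST], so a single unsigned below_equal comparison
  // checks both bounds at once.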
  __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
  __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
  __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  obj.Unuse();
  map.Unuse();
  destination()->Split(below_equal);
}

void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
  //  typeof(arg) == 'function').
  // It includes undetectable objects (as opposed to IsObject).
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);

  // Check that this is an object.
  frame_->Spill(value.reg());
  __ CmpObjectType(value.reg(), FIRST_JS_OBJECT_TYPE, value.reg());
  value.Unuse();
  destination()->Split(above_equal);
}

Iain Merrick75681382010-08-19 15:07:18 +01006692// Deferred code to check whether the String JavaScript object is safe for using
6693// default value of. This code is called after the bit caching this information
6694// in the map has been checked with the map for the object in the map_result_
6695// register. On return the register map_result_ contains 1 for true and 0 for
6696// false.
6697class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
6698 public:
6699 DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
6700 Register map_result,
6701 Register scratch1,
6702 Register scratch2)
6703 : object_(object),
6704 map_result_(map_result),
6705 scratch1_(scratch1),
6706 scratch2_(scratch2) { }
6707
6708 virtual void Generate() {
6709 Label false_result;
6710
6711 // Check that map is loaded as expected.
6712 if (FLAG_debug_code) {
6713 __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
6714 __ Assert(equal, "Map not in expected register");
6715 }
6716
6717 // Check for fast case object. Generate false result for slow case object.
6718 __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
6719 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
6720 __ cmp(scratch1_, Factory::hash_table_map());
6721 __ j(equal, &false_result);
6722
6723 // Look for valueOf symbol in the descriptor array, and indicate false if
6724 // found. The type is not checked, so if it is a transition it is a false
6725 // negative.
6726 __ mov(map_result_,
6727 FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
6728 __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
6729 // map_result_: descriptor array
6730 // scratch1_: length of descriptor array
6731 // Calculate the end of the descriptor array.
6732 STATIC_ASSERT(kSmiTag == 0);
6733 STATIC_ASSERT(kSmiTagSize == 1);
6734 STATIC_ASSERT(kPointerSize == 4);
6735 __ lea(scratch1_,
6736 Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
6737 // Calculate location of the first key name.
6738 __ add(Operand(map_result_),
6739 Immediate(FixedArray::kHeaderSize +
6740 DescriptorArray::kFirstIndex * kPointerSize));
 6741 // Loop through all the keys in the descriptor array. If one of them is
 6742 // the symbol valueOf, the result is false.
6743 Label entry, loop;
6744 __ jmp(&entry);
6745 __ bind(&loop);
6746 __ mov(scratch2_, FieldOperand(map_result_, 0));
6747 __ cmp(scratch2_, Factory::value_of_symbol());
6748 __ j(equal, &false_result);
6749 __ add(Operand(map_result_), Immediate(kPointerSize));
6750 __ bind(&entry);
6751 __ cmp(map_result_, Operand(scratch1_));
6752 __ j(not_equal, &loop);
6753
6754 // Reload map as register map_result_ was used as temporary above.
6755 __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
6756
 6757 // If a valueOf property is not found on the object, check that its
 6758 // prototype is the unmodified String prototype. If not, the result is false.
6759 __ mov(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
6760 __ test(scratch1_, Immediate(kSmiTagMask));
6761 __ j(zero, &false_result);
6762 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
6763 __ mov(scratch2_, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
6764 __ mov(scratch2_,
6765 FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
6766 __ cmp(scratch1_,
6767 CodeGenerator::ContextOperand(
6768 scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
6769 __ j(not_equal, &false_result);
6770 // Set the bit in the map to indicate that it has been checked safe for
6771 // default valueOf and set true result.
6772 __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
6773 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
6774 __ Set(map_result_, Immediate(1));
6775 __ jmp(exit_label());
6776 __ bind(&false_result);
6777 // Set false result.
6778 __ Set(map_result_, Immediate(0));
6779 }
6780
6781 private:
6782 Register object_;
6783 Register map_result_;
6784 Register scratch1_;
6785 Register scratch2_;
6786};
6787
6788
6789void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
6790 ZoneList<Expression*>* args) {
6791 ASSERT(args->length() == 1);
6792 Load(args->at(0));
6793 Result obj = frame_->Pop(); // Pop the string wrapper.
6794 obj.ToRegister();
6795 ASSERT(obj.is_valid());
6796 if (FLAG_debug_code) {
6797 __ AbortIfSmi(obj.reg());
6798 }
6799
6800 // Check whether this map has already been checked to be safe for default
6801 // valueOf.
6802 Result map_result = allocator()->Allocate();
6803 ASSERT(map_result.is_valid());
6804 __ mov(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
6805 __ test_b(FieldOperand(map_result.reg(), Map::kBitField2Offset),
6806 1 << Map::kStringWrapperSafeForDefaultValueOf);
6807 destination()->true_target()->Branch(not_zero);
6808
6809 // We need an additional two scratch registers for the deferred code.
6810 Result temp1 = allocator()->Allocate();
6811 ASSERT(temp1.is_valid());
6812 Result temp2 = allocator()->Allocate();
6813 ASSERT(temp2.is_valid());
6814
6815 DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
6816 new DeferredIsStringWrapperSafeForDefaultValueOf(
6817 obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
6818 deferred->Branch(zero);
6819 deferred->BindExit();
6820 __ test(map_result.reg(), Operand(map_result.reg()));
6821 obj.Unuse();
6822 map_result.Unuse();
6823 temp1.Unuse();
6824 temp2.Unuse();
6825 destination()->Split(not_equal);
6826}
6827
6828
Steve Blockd0582a62009-12-15 09:54:21 +00006829void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
6830 // This generates a fast version of:
6831 // (%_ClassOf(arg) === 'Function')
6832 ASSERT(args->length() == 1);
6833 Load(args->at(0));
6834 Result obj = frame_->Pop();
6835 obj.ToRegister();
6836 __ test(obj.reg(), Immediate(kSmiTagMask));
6837 destination()->false_target()->Branch(zero);
6838 Result temp = allocator()->Allocate();
6839 ASSERT(temp.is_valid());
6840 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
6841 obj.Unuse();
6842 temp.Unuse();
6843 destination()->Split(equal);
6844}
6845
6846
Leon Clarked91b9f72010-01-27 17:25:45 +00006847void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
6848 ASSERT(args->length() == 1);
6849 Load(args->at(0));
6850 Result obj = frame_->Pop();
6851 obj.ToRegister();
6852 __ test(obj.reg(), Immediate(kSmiTagMask));
6853 destination()->false_target()->Branch(zero);
6854 Result temp = allocator()->Allocate();
6855 ASSERT(temp.is_valid());
6856 __ mov(temp.reg(),
6857 FieldOperand(obj.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006858 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
6859 1 << Map::kIsUndetectable);
Leon Clarked91b9f72010-01-27 17:25:45 +00006860 obj.Unuse();
6861 temp.Unuse();
6862 destination()->Split(not_zero);
6863}
6864
6865
Steve Blocka7e24c12009-10-30 11:49:00 +00006866void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
6867 ASSERT(args->length() == 0);
6868
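  // In effect (sketch of the walk below):
  //   fp = Memory[ebp + kCallerFPOffset];               // the calling frame
  //   if (fp's context slot == ARGUMENTS_ADAPTOR) fp = fp's caller;
  //   result = (fp's marker slot == CONSTRUCT);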
6869 // Get the frame pointer for the calling frame.
6870 Result fp = allocator()->Allocate();
6871 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
6872
6873 // Skip the arguments adaptor frame if it exists.
6874 Label check_frame_marker;
6875 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
6876 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
6877 __ j(not_equal, &check_frame_marker);
6878 __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
6879
6880 // Check the marker in the calling frame.
6881 __ bind(&check_frame_marker);
6882 __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
6883 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
6884 fp.Unuse();
6885 destination()->Split(equal);
6886}
6887
6888
6889void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
6890 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01006891
6892 Result fp = allocator_->Allocate();
6893 Result result = allocator_->Allocate();
6894 ASSERT(fp.is_valid() && result.is_valid());
6895
6896 Label exit;
6897
6898 // Get the number of formal parameters.
6899 __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));
6900
6901 // Check if the calling frame is an arguments adaptor frame.
6902 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
6903 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
6904 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
6905 __ j(not_equal, &exit);
6906
6907 // Arguments adaptor case: Read the arguments length from the
6908 // adaptor frame.
6909 __ mov(result.reg(),
6910 Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));
6911
6912 __ bind(&exit);
6913 result.set_type_info(TypeInfo::Smi());
6914 if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00006915 frame_->Push(&result);
6916}
6917
6918
6919void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
6920 ASSERT(args->length() == 1);
6921 JumpTarget leave, null, function, non_function_constructor;
6922 Load(args->at(0)); // Load the object.
6923 Result obj = frame_->Pop();
6924 obj.ToRegister();
6925 frame_->Spill(obj.reg());
6926
6927 // If the object is a smi, we return null.
6928 __ test(obj.reg(), Immediate(kSmiTagMask));
6929 null.Branch(zero);
6930
6931 // Check that the object is a JS object but take special care of JS
6932 // functions to make sure they have 'Function' as their class.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006933 __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
6934 null.Branch(below);
Steve Blocka7e24c12009-10-30 11:49:00 +00006935
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006936 // As long as JS_FUNCTION_TYPE is the last instance type and it is
6937 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
6938 // LAST_JS_OBJECT_TYPE.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01006939 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
6940 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006941 __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
6942 function.Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00006943
6944 // Check if the constructor in the map is a function.
6945 { Result tmp = allocator()->Allocate();
6946 __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
6947 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
6948 non_function_constructor.Branch(not_equal);
6949 }
6950
6951 // The map register now contains the constructor function. Grab the
6952 // instance class name from there.
6953 __ mov(obj.reg(),
6954 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
6955 __ mov(obj.reg(),
6956 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
6957 frame_->Push(&obj);
6958 leave.Jump();
6959
6960 // Functions have class 'Function'.
6961 function.Bind();
6962 frame_->Push(Factory::function_class_symbol());
6963 leave.Jump();
6964
6965 // Objects with a non-function constructor have class 'Object'.
6966 non_function_constructor.Bind();
6967 frame_->Push(Factory::Object_symbol());
6968 leave.Jump();
6969
6970 // Non-JS objects have class null.
6971 null.Bind();
6972 frame_->Push(Factory::null_value());
6973
6974 // All done.
6975 leave.Bind();
6976}
6977
6978
6979void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
6980 ASSERT(args->length() == 1);
6981 JumpTarget leave;
6982 Load(args->at(0)); // Load the object.
6983 frame_->Dup();
6984 Result object = frame_->Pop();
6985 object.ToRegister();
6986 ASSERT(object.is_valid());
6987 // if (object->IsSmi()) return object.
6988 __ test(object.reg(), Immediate(kSmiTagMask));
6989 leave.Branch(zero, taken);
6990 // It is a heap object - get map.
6991 Result temp = allocator()->Allocate();
6992 ASSERT(temp.is_valid());
6993 // if (!object->IsJSValue()) return object.
6994 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
6995 leave.Branch(not_equal, not_taken);
6996 __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
6997 object.Unuse();
6998 frame_->SetElementAt(0, &temp);
6999 leave.Bind();
7000}
7001
7002
7003void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
7004 ASSERT(args->length() == 2);
7005 JumpTarget leave;
7006 Load(args->at(0)); // Load the object.
7007 Load(args->at(1)); // Load the value.
7008 Result value = frame_->Pop();
7009 Result object = frame_->Pop();
7010 value.ToRegister();
7011 object.ToRegister();
7012
7013 // if (object->IsSmi()) return value.
7014 __ test(object.reg(), Immediate(kSmiTagMask));
7015 leave.Branch(zero, &value, taken);
7016
7017 // It is a heap object - get its map.
7018 Result scratch = allocator_->Allocate();
7019 ASSERT(scratch.is_valid());
7020 // if (!object->IsJSValue()) return value.
7021 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
7022 leave.Branch(not_equal, &value, not_taken);
7023
7024 // Store the value.
7025 __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
7026 // Update the write barrier. Save the value as it will be
7027 // overwritten by the write barrier code and is needed afterward.
7028 Result duplicate_value = allocator_->Allocate();
7029 ASSERT(duplicate_value.is_valid());
7030 __ mov(duplicate_value.reg(), value.reg());
7031 // The object register is also overwritten by the write barrier and
7032 // possibly aliased in the frame.
7033 frame_->Spill(object.reg());
7034 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
7035 scratch.reg());
7036 object.Unuse();
7037 scratch.Unuse();
7038 duplicate_value.Unuse();
7039
7040 // Leave.
7041 leave.Bind(&value);
7042 frame_->Push(&value);
7043}
7044
7045
Steve Block6ded16b2010-05-10 14:33:55 +01007046void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007047 ASSERT(args->length() == 1);
7048
7049 // ArgumentsAccessStub expects the key in edx and the formal
7050 // parameter count in eax.
7051 Load(args->at(0));
7052 Result key = frame_->Pop();
7053 // Explicitly create a constant result.
Andrei Popescu31002712010-02-23 13:46:05 +00007054 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00007055 // Call the shared stub to get to arguments[key].
7056 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
7057 Result result = frame_->CallStub(&stub, &key, &count);
7058 frame_->Push(&result);
7059}
7060
7061
7062void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
7063 ASSERT(args->length() == 2);
7064
7065 // Load the two objects into registers and perform the comparison.
7066 Load(args->at(0));
7067 Load(args->at(1));
7068 Result right = frame_->Pop();
7069 Result left = frame_->Pop();
7070 right.ToRegister();
7071 left.ToRegister();
7072 __ cmp(right.reg(), Operand(left.reg()));
7073 right.Unuse();
7074 left.Unuse();
7075 destination()->Split(equal);
7076}
7077
7078
7079void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
7080 ASSERT(args->length() == 0);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007081 STATIC_ASSERT(kSmiTag == 0); // EBP value is aligned, so it looks like a Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00007082 Result ebp_as_smi = allocator_->Allocate();
7083 ASSERT(ebp_as_smi.is_valid());
7084 __ mov(ebp_as_smi.reg(), Operand(ebp));
7085 frame_->Push(&ebp_as_smi);
7086}
7087
7088
Steve Block6ded16b2010-05-10 14:33:55 +01007089void CodeGenerator::GenerateRandomHeapNumber(
7090 ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007091 ASSERT(args->length() == 0);
7092 frame_->SpillAll();
7093
Steve Block6ded16b2010-05-10 14:33:55 +01007094 Label slow_allocate_heapnumber;
7095 Label heapnumber_allocated;
Steve Blocka7e24c12009-10-30 11:49:00 +00007096
Steve Block6ded16b2010-05-10 14:33:55 +01007097 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
7098 __ jmp(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00007099
Steve Block6ded16b2010-05-10 14:33:55 +01007100 __ bind(&slow_allocate_heapnumber);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01007101 // Allocate a heap number.
7102 __ CallRuntime(Runtime::kNumberAlloc, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01007103 __ mov(edi, eax);
7104
7105 __ bind(&heapnumber_allocated);
7106
7107 __ PrepareCallCFunction(0, ebx);
7108 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
7109
7110 // Convert 32 random bits in eax to 0.(32 random bits) in a double
7111 // by computing:
 7112 // ( 1.(20 0s)(32 random bits) x 2^20 ) - ( 1.0 x 2^20 ).
7113 // This is implemented on both SSE2 and FPU.
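  // A minimal C++ sketch of the same trick (illustrative, not a V8 helper;
  // assumes IEEE-754 doubles and <cstring> for memcpy):
  //
  //   double RandomFraction(uint32_t r) {
  //     uint64_t bits = (uint64_t(0x41300000) << 32) | r;  // 1.(20 0s)(r) x 2^20
  //     double d;
  //     memcpy(&d, &bits, sizeof(d));
  //     return d - 1048576.0;  // subtract 1.0 x 2^20, leaving 0.(r)
  //   }
  //
  // E.g. r == 0x80000000 makes the double 2^20 + 0.5, so the result is 0.5.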
7114 if (CpuFeatures::IsSupported(SSE2)) {
7115 CpuFeatures::Scope fscope(SSE2);
7116 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
7117 __ movd(xmm1, Operand(ebx));
7118 __ movd(xmm0, Operand(eax));
7119 __ cvtss2sd(xmm1, xmm1);
7120 __ pxor(xmm0, xmm1);
7121 __ subsd(xmm0, xmm1);
7122 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
7123 } else {
7124 // 0x4130000000000000 is 1.0 x 2^20 as a double.
7125 __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
7126 Immediate(0x41300000));
7127 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
7128 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
7129 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
7130 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
7131 __ fsubp(1);
7132 __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00007133 }
Steve Block6ded16b2010-05-10 14:33:55 +01007134 __ mov(eax, edi);
Steve Blocka7e24c12009-10-30 11:49:00 +00007135
7136 Result result = allocator_->Allocate(eax);
7137 frame_->Push(&result);
7138}
7139
7140
Steve Blockd0582a62009-12-15 09:54:21 +00007141void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
7142 ASSERT_EQ(2, args->length());
7143
7144 Load(args->at(0));
7145 Load(args->at(1));
7146
7147 StringAddStub stub(NO_STRING_ADD_FLAGS);
7148 Result answer = frame_->CallStub(&stub, 2);
7149 frame_->Push(&answer);
7150}
7151
7152
Leon Clarkee46be812010-01-19 14:06:41 +00007153void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
7154 ASSERT_EQ(3, args->length());
7155
7156 Load(args->at(0));
7157 Load(args->at(1));
7158 Load(args->at(2));
7159
7160 SubStringStub stub;
7161 Result answer = frame_->CallStub(&stub, 3);
7162 frame_->Push(&answer);
7163}
7164
7165
7166void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
7167 ASSERT_EQ(2, args->length());
7168
7169 Load(args->at(0));
7170 Load(args->at(1));
7171
7172 StringCompareStub stub;
7173 Result answer = frame_->CallStub(&stub, 2);
7174 frame_->Push(&answer);
7175}
7176
7177
7178void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01007179 ASSERT_EQ(4, args->length());
Leon Clarkee46be812010-01-19 14:06:41 +00007180
7181 // Load the arguments on the stack and call the stub.
7182 Load(args->at(0));
7183 Load(args->at(1));
7184 Load(args->at(2));
7185 Load(args->at(3));
7186 RegExpExecStub stub;
7187 Result result = frame_->CallStub(&stub, 4);
7188 frame_->Push(&result);
7189}
7190
7191
Steve Block6ded16b2010-05-10 14:33:55 +01007192void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
7193 // No stub. This code only occurs a few times in regexp.js.
7194 const int kMaxInlineLength = 100;
7195 ASSERT_EQ(3, args->length());
7196 Load(args->at(0)); // Size of array, smi.
7197 Load(args->at(1)); // "index" property value.
7198 Load(args->at(2)); // "input" property value.
7199 {
7200 VirtualFrame::SpilledScope spilled_scope;
7201
7202 Label slowcase;
7203 Label done;
7204 __ mov(ebx, Operand(esp, kPointerSize * 2));
7205 __ test(ebx, Immediate(kSmiTagMask));
7206 __ j(not_zero, &slowcase);
7207 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
7208 __ j(above, &slowcase);
7209 // Smi-tagging is equivalent to multiplying by 2.
7210 STATIC_ASSERT(kSmiTag == 0);
7211 STATIC_ASSERT(kSmiTagSize == 1);
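    // For example, an untagged length of 5 is stored as the smi 5 << 1 == 10;
    // the times_half_pointer_size (x2) scaling below then yields
    // 5 * kPointerSize == 20 bytes of elements on ia32.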
7212 // Allocate RegExpResult followed by FixedArray with size in ebx.
7213 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
7214 // Elements: [Map][Length][..elements..]
7215 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
7216 times_half_pointer_size,
7217 ebx, // In: Number of elements (times 2, being a smi)
7218 eax, // Out: Start of allocation (tagged).
7219 ecx, // Out: End of allocation.
7220 edx, // Scratch register
7221 &slowcase,
7222 TAG_OBJECT);
7223 // eax: Start of allocated area, object-tagged.
7224
7225 // Set JSArray map to global.regexp_result_map().
7226 // Set empty properties FixedArray.
7227 // Set elements to point to FixedArray allocated right after the JSArray.
7228 // Interleave operations for better latency.
7229 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
7230 __ mov(ecx, Immediate(Factory::empty_fixed_array()));
7231 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
7232 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
7233 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
7234 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
7235 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
7236 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
7237
7238 // Set input, index and length fields from arguments.
7239 __ pop(FieldOperand(eax, JSRegExpResult::kInputOffset));
7240 __ pop(FieldOperand(eax, JSRegExpResult::kIndexOffset));
7241 __ pop(ecx);
7242 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
7243
7244 // Fill out the elements FixedArray.
7245 // eax: JSArray.
7246 // ebx: FixedArray.
7247 // ecx: Number of elements in array, as smi.
7248
7249 // Set map.
7250 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
7251 Immediate(Factory::fixed_array_map()));
7252 // Set length.
Steve Block6ded16b2010-05-10 14:33:55 +01007253 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
 7254 // Fill the contents of the fixed array with the hole value.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007255 __ SmiUntag(ecx);
Steve Block6ded16b2010-05-10 14:33:55 +01007256 __ mov(edx, Immediate(Factory::the_hole_value()));
7257 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
7258 // Fill fixed array elements with hole.
7259 // eax: JSArray.
7260 // ecx: Number of elements to fill.
7261 // ebx: Start of elements in FixedArray.
7262 // edx: the hole.
7263 Label loop;
7264 __ test(ecx, Operand(ecx));
7265 __ bind(&loop);
7266 __ j(less_equal, &done); // Jump if ecx is negative or zero.
7267 __ sub(Operand(ecx), Immediate(1));
7268 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
7269 __ jmp(&loop);
7270
7271 __ bind(&slowcase);
7272 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
7273
7274 __ bind(&done);
7275 }
7276 frame_->Forget(3);
7277 frame_->Push(eax);
7278}
7279
7280
Steve Block791712a2010-08-27 10:21:07 +01007281void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) {
7282 ASSERT_EQ(1, args->length());
7283
7284 Load(args->at(0));
7285 Result object_result = frame_->Pop();
7286 object_result.ToRegister(eax);
7287 object_result.Unuse();
7288 {
7289 VirtualFrame::SpilledScope spilled_scope;
7290
7291 Label done;
7292
7293 __ test(eax, Immediate(kSmiTagMask));
7294 __ j(zero, &done);
7295
7296 // Load JSRegExpResult map into edx.
7297 // Arguments to this function should be results of calling RegExp exec,
 7298 // each being either an unmodified JSRegExpResult or null. Anything not having
7299 // the unmodified JSRegExpResult map is returned unmodified.
7300 // This also ensures that elements are fast.
7301 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
7302 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
7303 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
7304 __ cmp(edx, FieldOperand(eax, HeapObject::kMapOffset));
7305 __ j(not_equal, &done);
7306
7307 if (FLAG_debug_code) {
7308 // Check that object really has empty properties array, as the map
7309 // should guarantee.
7310 __ cmp(FieldOperand(eax, JSObject::kPropertiesOffset),
7311 Immediate(Factory::empty_fixed_array()));
7312 __ Check(equal, "JSRegExpResult: default map but non-empty properties.");
7313 }
7314
7315 DeferredAllocateInNewSpace* allocate_fallback =
7316 new DeferredAllocateInNewSpace(JSRegExpResult::kSize,
7317 ebx,
7318 edx.bit() | eax.bit());
7319
7320 // All set, copy the contents to a new object.
7321 __ AllocateInNewSpace(JSRegExpResult::kSize,
7322 ebx,
7323 ecx,
7324 no_reg,
7325 allocate_fallback->entry_label(),
7326 TAG_OBJECT);
7327 __ bind(allocate_fallback->exit_label());
7328
7329 // Copy all fields from eax to ebx.
7330 STATIC_ASSERT(JSRegExpResult::kSize % (2 * kPointerSize) == 0);
7331 // There is an even number of fields, so unroll the loop once
7332 // for efficiency.
7333 for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) {
7334 STATIC_ASSERT(JSObject::kMapOffset % (2 * kPointerSize) == 0);
7335 if (i != JSObject::kMapOffset) {
7336 // The map was already loaded into edx.
7337 __ mov(edx, FieldOperand(eax, i));
7338 }
7339 __ mov(ecx, FieldOperand(eax, i + kPointerSize));
7340
7341 STATIC_ASSERT(JSObject::kElementsOffset % (2 * kPointerSize) == 0);
7342 if (i == JSObject::kElementsOffset) {
7343 // If the elements array isn't empty, make it copy-on-write
7344 // before copying it.
7345 Label empty;
7346 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
7347 __ j(equal, &empty);
Steve Block791712a2010-08-27 10:21:07 +01007348 __ mov(FieldOperand(edx, HeapObject::kMapOffset),
7349 Immediate(Factory::fixed_cow_array_map()));
7350 __ bind(&empty);
7351 }
7352 __ mov(FieldOperand(ebx, i), edx);
7353 __ mov(FieldOperand(ebx, i + kPointerSize), ecx);
7354 }
7355 __ mov(eax, ebx);
7356
7357 __ bind(&done);
7358 }
7359 frame_->Push(eax);
7360}
7361
7362
Steve Block6ded16b2010-05-10 14:33:55 +01007363class DeferredSearchCache: public DeferredCode {
7364 public:
7365 DeferredSearchCache(Register dst, Register cache, Register key)
7366 : dst_(dst), cache_(cache), key_(key) {
7367 set_comment("[ DeferredSearchCache");
7368 }
7369
7370 virtual void Generate();
7371
7372 private:
Kristian Monsen25f61362010-05-21 11:50:48 +01007373 Register dst_; // On invocation: Smi index of the finger;
 7374 // on exit: the value that was looked up.
 7375 Register cache_; // The JSFunctionResultCache instance.
 7376 Register key_; // The key being looked up.
Steve Block6ded16b2010-05-10 14:33:55 +01007377};
7378
7379
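// The cache (a JSFunctionResultCache) is a fixed array laid out roughly as
//   [factory][finger][size][key][value][key][value]...
// with kEntrySize == 2. A sketch of the search below (illustrative
// pseudocode, with untagged indices):
//
//   for (i = finger - 2; i >= kEntriesIndex; i -= 2)  // finger down to start
//     if (cache[i] == key) { finger = i; return cache[i + 1]; }
//   for (i = size - 2; i > finger; i -= 2)            // end down to finger
//     if (cache[i] == key) { finger = i; return cache[i + 1]; }
//   // Miss: call the factory function on the key, then store the new
//   // key/value pair, evicting the entry after the finger (with wraparound)
//   // when the cache is full.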
7380void DeferredSearchCache::Generate() {
Kristian Monsen25f61362010-05-21 11:50:48 +01007381 Label first_loop, search_further, second_loop, cache_miss;
7382
7383 // Smi-tagging is equivalent to multiplying by 2.
7384 STATIC_ASSERT(kSmiTag == 0);
7385 STATIC_ASSERT(kSmiTagSize == 1);
7386
7387 Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
7388 Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);
7389
7390 // Check the cache from finger to start of the cache.
7391 __ bind(&first_loop);
7392 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
7393 __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
7394 __ j(less, &search_further);
7395
7396 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
7397 __ j(not_equal, &first_loop);
7398
7399 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
7400 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
7401 __ jmp(exit_label());
7402
7403 __ bind(&search_further);
7404
7405 // Check the cache from end of cache up to finger.
7406 __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));
7407
7408 __ bind(&second_loop);
7409 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
7410 // Consider prefetching into some reg.
7411 __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
7412 __ j(less_equal, &cache_miss);
7413
7414 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
7415 __ j(not_equal, &second_loop);
7416
7417 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
7418 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
7419 __ jmp(exit_label());
7420
7421 __ bind(&cache_miss);
7422 __ push(cache_); // store a reference to cache
7423 __ push(key_); // store a key
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007424 __ push(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
Steve Block6ded16b2010-05-10 14:33:55 +01007425 __ push(key_);
Kristian Monsen25f61362010-05-21 11:50:48 +01007426 // On ia32 the function must be in edi.
7427 __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
7428 ParameterCount expected(1);
7429 __ InvokeFunction(edi, expected, CALL_FUNCTION);
7430
7431 // Find a place to put new cached value into.
7432 Label add_new_entry, update_cache;
7433 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache
7434 // Possible optimization: cache size is constant for the given cache
7435 // so technically we could use a constant here. However, if we have
7436 // cache miss this optimization would hardly matter much.
7437
7438 // Check if we could add new entry to cache.
7439 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
Kristian Monsen25f61362010-05-21 11:50:48 +01007440 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
7441 __ j(greater, &add_new_entry);
7442
7443 // Check if we could evict entry after finger.
7444 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
7445 __ add(Operand(edx), Immediate(kEntrySizeSmi));
7446 __ cmp(ebx, Operand(edx));
7447 __ j(greater, &update_cache);
7448
7449 // Need to wrap over the cache.
7450 __ mov(edx, Immediate(kEntriesIndexSmi));
7451 __ jmp(&update_cache);
7452
7453 __ bind(&add_new_entry);
7454 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
7455 __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
7456 __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);
7457
7458 // Update the cache itself.
7459 // edx holds the index.
7460 __ bind(&update_cache);
7461 __ pop(ebx); // restore the key
7462 __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
7463 // Store key.
7464 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
7465 __ RecordWrite(ecx, 0, ebx, edx);
7466
7467 // Store value.
7468 __ pop(ecx); // restore the cache.
7469 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
7470 __ add(Operand(edx), Immediate(Smi::FromInt(1)));
7471 __ mov(ebx, eax);
7472 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
7473 __ RecordWrite(ecx, 0, ebx, edx);
7474
Steve Block6ded16b2010-05-10 14:33:55 +01007475 if (!dst_.is(eax)) {
7476 __ mov(dst_, eax);
7477 }
7478}
7479
7480
7481void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
7482 ASSERT_EQ(2, args->length());
7483
7484 ASSERT_NE(NULL, args->at(0)->AsLiteral());
7485 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
7486
7487 Handle<FixedArray> jsfunction_result_caches(
7488 Top::global_context()->jsfunction_result_caches());
7489 if (jsfunction_result_caches->length() <= cache_id) {
7490 __ Abort("Attempt to use undefined cache.");
7491 frame_->Push(Factory::undefined_value());
7492 return;
7493 }
7494
7495 Load(args->at(1));
7496 Result key = frame_->Pop();
7497 key.ToRegister();
7498
7499 Result cache = allocator()->Allocate();
7500 ASSERT(cache.is_valid());
7501 __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
7502 __ mov(cache.reg(),
7503 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
7504 __ mov(cache.reg(),
7505 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
7506 __ mov(cache.reg(),
7507 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));
7508
7509 Result tmp = allocator()->Allocate();
7510 ASSERT(tmp.is_valid());
7511
7512 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
7513 cache.reg(),
7514 key.reg());
7515
Steve Block6ded16b2010-05-10 14:33:55 +01007516 // Load the cache finger offset (a smi) into tmp.reg().
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007517 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01007518 __ mov(tmp.reg(), FieldOperand(cache.reg(),
7519 JSFunctionResultCache::kFingerOffset));
7520 __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01007521 deferred->Branch(not_equal);
7522
Kristian Monsen25f61362010-05-21 11:50:48 +01007523 __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));
Steve Block6ded16b2010-05-10 14:33:55 +01007524
7525 deferred->BindExit();
7526 frame_->Push(&tmp);
7527}
7528
7529
Andrei Popescu402d9372010-02-26 13:31:12 +00007530void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
7531 ASSERT_EQ(args->length(), 1);
7532
7533 // Load the argument on the stack and call the stub.
7534 Load(args->at(0));
7535 NumberToStringStub stub;
7536 Result result = frame_->CallStub(&stub, 1);
7537 frame_->Push(&result);
7538}
7539
7540
Steve Block6ded16b2010-05-10 14:33:55 +01007541class DeferredSwapElements: public DeferredCode {
7542 public:
7543 DeferredSwapElements(Register object, Register index1, Register index2)
7544 : object_(object), index1_(index1), index2_(index2) {
7545 set_comment("[ DeferredSwapElements");
7546 }
7547
7548 virtual void Generate();
7549
7550 private:
7551 Register object_, index1_, index2_;
7552};
7553
7554
7555void DeferredSwapElements::Generate() {
7556 __ push(object_);
7557 __ push(index1_);
7558 __ push(index2_);
7559 __ CallRuntime(Runtime::kSwapElements, 3);
7560}
7561
7562
7563void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
 7564 // Note: this code assumes that the indices passed are within the
 7565 // elements' bounds and refer to valid (non-hole) values.
7566 Comment cmnt(masm_, "[ GenerateSwapElements");
7567
7568 ASSERT_EQ(3, args->length());
7569
7570 Load(args->at(0));
7571 Load(args->at(1));
7572 Load(args->at(2));
7573
7574 Result index2 = frame_->Pop();
7575 index2.ToRegister();
7576
7577 Result index1 = frame_->Pop();
7578 index1.ToRegister();
7579
7580 Result object = frame_->Pop();
7581 object.ToRegister();
7582
7583 Result tmp1 = allocator()->Allocate();
7584 tmp1.ToRegister();
7585 Result tmp2 = allocator()->Allocate();
7586 tmp2.ToRegister();
7587
7588 frame_->Spill(object.reg());
7589 frame_->Spill(index1.reg());
7590 frame_->Spill(index2.reg());
7591
7592 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
7593 index1.reg(),
7594 index2.reg());
7595
7596 // Fetch the map and check if array is in fast case.
7597 // Check that object doesn't require security checks and
7598 // has no indexed interceptor.
7599 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
Leon Clarkef7060e22010-06-03 12:02:55 +01007600 deferred->Branch(below);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007601 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
7602 KeyedLoadIC::kSlowCaseBitFieldMask);
Steve Block6ded16b2010-05-10 14:33:55 +01007603 deferred->Branch(not_zero);
7604
Iain Merrick75681382010-08-19 15:07:18 +01007605 // Check the object's elements are in fast case and writable.
Steve Block6ded16b2010-05-10 14:33:55 +01007606 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
7607 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
7608 Immediate(Factory::fixed_array_map()));
7609 deferred->Branch(not_equal);
7610
7611 // Smi-tagging is equivalent to multiplying by 2.
7612 STATIC_ASSERT(kSmiTag == 0);
7613 STATIC_ASSERT(kSmiTagSize == 1);
7614
7615 // Check that both indices are smis.
7616 __ mov(tmp2.reg(), index1.reg());
7617 __ or_(tmp2.reg(), Operand(index2.reg()));
7618 __ test(tmp2.reg(), Immediate(kSmiTagMask));
7619 deferred->Branch(not_zero);
7620
7621 // Bring addresses into index1 and index2.
Kristian Monsen25f61362010-05-21 11:50:48 +01007622 __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
7623 __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01007624
7625 // Swap elements.
7626 __ mov(object.reg(), Operand(index1.reg(), 0));
7627 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
7628 __ mov(Operand(index2.reg(), 0), object.reg());
7629 __ mov(Operand(index1.reg(), 0), tmp2.reg());
7630
7631 Label done;
7632 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
 7633 // Possible optimization: check that both values are smis
 7634 // (OR them together and test the result against the smi mask).
7635
7636 __ mov(tmp2.reg(), tmp1.reg());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007637 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
7638 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01007639 __ bind(&done);
7640
7641 deferred->BindExit();
7642 frame_->Push(Factory::undefined_value());
7643}
7644
7645
7646void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
7647 Comment cmnt(masm_, "[ GenerateCallFunction");
7648
7649 ASSERT(args->length() >= 2);
7650
7651 int n_args = args->length() - 2; // for receiver and function.
7652 Load(args->at(0)); // receiver
7653 for (int i = 0; i < n_args; i++) {
7654 Load(args->at(i + 1));
7655 }
7656 Load(args->at(n_args + 1)); // function
7657 Result result = frame_->CallJSFunction(n_args);
7658 frame_->Push(&result);
7659}
7660
7661
7662// Generates the Math.pow method. Only handles special cases and
7663// branches to the runtime system for everything else. Please note
7664// that this function assumes that the callsite has executed ToNumber
7665// on both arguments.
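// The smi-exponent fast path below is square-and-multiply (binary)
// exponentiation; a plain C++ sketch of the same loop (illustrative only):
//
//   double PowInt(double x, uint32_t m) {  // m = abs(exponent)
//     double result = 1.0;
//     while (m != 0) {
//       if (m & 1) result *= x;
//       x *= x;
//       m >>= 1;
//     }
//     return result;  // a negative exponent then returns 1.0 / result
//   }
//
// This costs about log2(m) squarings plus one multiply per set bit in m,
// e.g. x^13 takes 7 multiplications instead of 12.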
7666void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
7667 ASSERT(args->length() == 2);
7668 Load(args->at(0));
7669 Load(args->at(1));
7670 if (!CpuFeatures::IsSupported(SSE2)) {
7671 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
7672 frame_->Push(&res);
7673 } else {
7674 CpuFeatures::Scope use_sse2(SSE2);
7675 Label allocate_return;
7676 // Load the two operands while leaving the values on the frame.
7677 frame()->Dup();
7678 Result exponent = frame()->Pop();
7679 exponent.ToRegister();
7680 frame()->Spill(exponent.reg());
7681 frame()->PushElementAt(1);
7682 Result base = frame()->Pop();
7683 base.ToRegister();
7684 frame()->Spill(base.reg());
7685
7686 Result answer = allocator()->Allocate();
7687 ASSERT(answer.is_valid());
7688 ASSERT(!exponent.reg().is(base.reg()));
7689 JumpTarget call_runtime;
7690
7691 // Save 1 in xmm3 - we need this several times later on.
7692 __ mov(answer.reg(), Immediate(1));
7693 __ cvtsi2sd(xmm3, Operand(answer.reg()));
7694
7695 Label exponent_nonsmi;
7696 Label base_nonsmi;
7697 // If the exponent is a heap number go to that specific case.
7698 __ test(exponent.reg(), Immediate(kSmiTagMask));
7699 __ j(not_zero, &exponent_nonsmi);
7700 __ test(base.reg(), Immediate(kSmiTagMask));
7701 __ j(not_zero, &base_nonsmi);
7702
7703 // Optimized version when y is an integer.
7704 Label powi;
7705 __ SmiUntag(base.reg());
7706 __ cvtsi2sd(xmm0, Operand(base.reg()));
7707 __ jmp(&powi);
7708 // exponent is smi and base is a heapnumber.
7709 __ bind(&base_nonsmi);
7710 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7711 Factory::heap_number_map());
7712 call_runtime.Branch(not_equal);
7713
7714 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7715
7716 // Optimized version of pow if y is an integer.
7717 __ bind(&powi);
7718 __ SmiUntag(exponent.reg());
7719
7720 // Save exponent in base as we need to check if exponent is negative later.
7721 // We know that base and exponent are in different registers.
7722 __ mov(base.reg(), exponent.reg());
7723
7724 // Get absolute value of exponent.
7725 Label no_neg;
7726 __ cmp(exponent.reg(), 0);
7727 __ j(greater_equal, &no_neg);
7728 __ neg(exponent.reg());
7729 __ bind(&no_neg);
7730
7731 // Load xmm1 with 1.
7732 __ movsd(xmm1, xmm3);
7733 Label while_true;
7734 Label no_multiply;
7735
7736 __ bind(&while_true);
7737 __ shr(exponent.reg(), 1);
7738 __ j(not_carry, &no_multiply);
7739 __ mulsd(xmm1, xmm0);
7740 __ bind(&no_multiply);
7741 __ test(exponent.reg(), Operand(exponent.reg()));
7742 __ mulsd(xmm0, xmm0);
7743 __ j(not_zero, &while_true);
7744
 7745 // base.reg() holds the original exponent; if it was negative, return 1/result.
7746 __ test(base.reg(), Operand(base.reg()));
7747 __ j(positive, &allocate_return);
7748 // Special case if xmm1 has reached infinity.
7749 __ mov(answer.reg(), Immediate(0x7FB00000));
7750 __ movd(xmm0, Operand(answer.reg()));
7751 __ cvtss2sd(xmm0, xmm0);
7752 __ ucomisd(xmm0, xmm1);
7753 call_runtime.Branch(equal);
7754 __ divsd(xmm3, xmm1);
7755 __ movsd(xmm1, xmm3);
7756 __ jmp(&allocate_return);
7757
7758 // exponent (or both) is a heapnumber - no matter what we should now work
7759 // on doubles.
7760 __ bind(&exponent_nonsmi);
7761 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
7762 Factory::heap_number_map());
7763 call_runtime.Branch(not_equal);
7764 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
7765 // Test if exponent is nan.
7766 __ ucomisd(xmm1, xmm1);
7767 call_runtime.Branch(parity_even);
7768
7769 Label base_not_smi;
7770 Label handle_special_cases;
7771 __ test(base.reg(), Immediate(kSmiTagMask));
7772 __ j(not_zero, &base_not_smi);
7773 __ SmiUntag(base.reg());
7774 __ cvtsi2sd(xmm0, Operand(base.reg()));
7775 __ jmp(&handle_special_cases);
7776 __ bind(&base_not_smi);
7777 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7778 Factory::heap_number_map());
7779 call_runtime.Branch(not_equal);
7780 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
7781 __ and_(answer.reg(), HeapNumber::kExponentMask);
7782 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
7783 // base is NaN or +/-Infinity
7784 call_runtime.Branch(greater_equal);
7785 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7786
7787 // base is in xmm0 and exponent is in xmm1.
7788 __ bind(&handle_special_cases);
7789 Label not_minus_half;
7790 // Test for -0.5.
7791 // Load xmm2 with -0.5.
7792 __ mov(answer.reg(), Immediate(0xBF000000));
7793 __ movd(xmm2, Operand(answer.reg()));
7794 __ cvtss2sd(xmm2, xmm2);
7795 // xmm2 now has -0.5.
7796 __ ucomisd(xmm2, xmm1);
7797 __ j(not_equal, &not_minus_half);
7798
7799 // Calculates reciprocal of square root.
 7800 // Note that 1/sqrt(x) = sqrt(1/x); e.g. 1/sqrt(4) = 0.5 = sqrt(0.25).
7801 __ divsd(xmm3, xmm0);
7802 __ movsd(xmm1, xmm3);
7803 __ sqrtsd(xmm1, xmm1);
7804 __ jmp(&allocate_return);
7805
7806 // Test for 0.5.
7807 __ bind(&not_minus_half);
7808 // Load xmm2 with 0.5.
7809 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
7810 __ addsd(xmm2, xmm3);
7811 // xmm2 now has 0.5.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007812 __ ucomisd(xmm2, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +01007813 call_runtime.Branch(not_equal);
7814 // Calculates square root.
7815 __ movsd(xmm1, xmm0);
7816 __ sqrtsd(xmm1, xmm1);
7817
7818 JumpTarget done;
7819 Label failure, success;
7820 __ bind(&allocate_return);
7821 // Make a copy of the frame to enable us to handle allocation
7822 // failure after the JumpTarget jump.
7823 VirtualFrame* clone = new VirtualFrame(frame());
7824 __ AllocateHeapNumber(answer.reg(), exponent.reg(),
7825 base.reg(), &failure);
7826 __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
7827 // Remove the two original values from the frame - we only need those
7828 // in the case where we branch to runtime.
7829 frame()->Drop(2);
7830 exponent.Unuse();
7831 base.Unuse();
7832 done.Jump(&answer);
7833 // Use the copy of the original frame as our current frame.
7834 RegisterFile empty_regs;
7835 SetFrame(clone, &empty_regs);
7836 // If we experience an allocation failure we branch to runtime.
7837 __ bind(&failure);
7838 call_runtime.Bind();
7839 answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);
7840
7841 done.Bind(&answer);
7842 frame()->Push(&answer);
7843 }
7844}
7845
7846
Andrei Popescu402d9372010-02-26 13:31:12 +00007847void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
7848 ASSERT_EQ(args->length(), 1);
7849 Load(args->at(0));
7850 TranscendentalCacheStub stub(TranscendentalCache::SIN);
7851 Result result = frame_->CallStub(&stub, 1);
7852 frame_->Push(&result);
7853}
7854
7855
7856void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
7857 ASSERT_EQ(args->length(), 1);
7858 Load(args->at(0));
7859 TranscendentalCacheStub stub(TranscendentalCache::COS);
7860 Result result = frame_->CallStub(&stub, 1);
7861 frame_->Push(&result);
7862}
7863
7864
Steve Block6ded16b2010-05-10 14:33:55 +01007865// Generates the Math.sqrt method. Please note - this function assumes that
7866// the callsite has executed ToNumber on the argument.
7867void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
7868 ASSERT_EQ(args->length(), 1);
7869 Load(args->at(0));
7870
7871 if (!CpuFeatures::IsSupported(SSE2)) {
7872 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
7873 frame()->Push(&result);
7874 } else {
7875 CpuFeatures::Scope use_sse2(SSE2);
7876 // Leave original value on the frame if we need to call runtime.
7877 frame()->Dup();
7878 Result result = frame()->Pop();
7879 result.ToRegister();
7880 frame()->Spill(result.reg());
7881 Label runtime;
7882 Label non_smi;
7883 Label load_done;
7884 JumpTarget end;
7885
7886 __ test(result.reg(), Immediate(kSmiTagMask));
7887 __ j(not_zero, &non_smi);
7888 __ SmiUntag(result.reg());
7889 __ cvtsi2sd(xmm0, Operand(result.reg()));
7890 __ jmp(&load_done);
7891 __ bind(&non_smi);
7892 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
7893 Factory::heap_number_map());
7894 __ j(not_equal, &runtime);
7895 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
7896
7897 __ bind(&load_done);
7898 __ sqrtsd(xmm0, xmm0);
7899 // A copy of the virtual frame to allow us to go to runtime after the
7900 // JumpTarget jump.
7901 Result scratch = allocator()->Allocate();
7902 VirtualFrame* clone = new VirtualFrame(frame());
7903 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);
7904
7905 __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
7906 frame()->Drop(1);
7907 scratch.Unuse();
7908 end.Jump(&result);
7909 // We only branch to runtime if we have an allocation error.
7910 // Use the copy of the original frame as our current frame.
7911 RegisterFile empty_regs;
7912 SetFrame(clone, &empty_regs);
7913 __ bind(&runtime);
7914 result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
7915
7916 end.Bind(&result);
7917 frame()->Push(&result);
7918 }
7919}
7920
7921
Ben Murdochbb769b22010-08-11 14:56:33 +01007922void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
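  // In effect (sketch): the result is true when left and right are the same
  // object, or when both are JSRegExps with the same map and the same data
  // array (hence the same pattern and flags).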
7923 ASSERT_EQ(2, args->length());
7924 Load(args->at(0));
7925 Load(args->at(1));
7926 Result right_res = frame_->Pop();
7927 Result left_res = frame_->Pop();
7928 right_res.ToRegister();
7929 left_res.ToRegister();
7930 Result tmp_res = allocator()->Allocate();
7931 ASSERT(tmp_res.is_valid());
7932 Register right = right_res.reg();
7933 Register left = left_res.reg();
7934 Register tmp = tmp_res.reg();
7935 right_res.Unuse();
7936 left_res.Unuse();
7937 tmp_res.Unuse();
7938 __ cmp(left, Operand(right));
7939 destination()->true_target()->Branch(equal);
7940 // Fail if either is a non-HeapObject.
7941 __ mov(tmp, left);
7942 __ and_(Operand(tmp), right);
7943 __ test(Operand(tmp), Immediate(kSmiTagMask));
7944 destination()->false_target()->Branch(equal);
7945 __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
7946 destination()->false_target()->Branch(not_equal);
7947 __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
7948 destination()->false_target()->Branch(not_equal);
7949 __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
7950 __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
7951 destination()->Split(equal);
7952}
7953
7954
Kristian Monsen80d68ea2010-09-08 11:05:35 +01007955void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
7956 ASSERT(args->length() == 1);
7957 Load(args->at(0));
7958 Result value = frame_->Pop();
7959 value.ToRegister();
7960 ASSERT(value.is_valid());
7961 if (FLAG_debug_code) {
7962 __ AbortIfNotString(value.reg());
7963 }
7964
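  // A string that is a valid array index caches that index in its hash
  // field; the bits in kContainsCachedArrayIndexMask are all zero exactly
  // when such a cached index is present, so branching on zero below means
  // "has a cached index".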
7965 __ test(FieldOperand(value.reg(), String::kHashFieldOffset),
7966 Immediate(String::kContainsCachedArrayIndexMask));
7967
7968 value.Unuse();
7969 destination()->Split(zero);
7970}
7971
7972
7973void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
7974 ASSERT(args->length() == 1);
7975 Load(args->at(0));
7976 Result string = frame_->Pop();
7977 string.ToRegister();
7978 if (FLAG_debug_code) {
7979 __ AbortIfNotString(string.reg());
7980 }
7981
7982 Result number = allocator()->Allocate();
7983 ASSERT(number.is_valid());
7984 __ mov(number.reg(), FieldOperand(string.reg(), String::kHashFieldOffset));
7985 __ IndexFromHash(number.reg(), number.reg());
7986 string.Unuse();
7987 frame_->Push(&number);
7988}
7989
7990
Steve Blocka7e24c12009-10-30 11:49:00 +00007991void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01007992 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00007993 if (CheckForInlineRuntimeCall(node)) {
7994 return;
7995 }
7996
7997 ZoneList<Expression*>* args = node->arguments();
7998 Comment cmnt(masm_, "[ CallRuntime");
7999 Runtime::Function* function = node->function();
8000
8001 if (function == NULL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008002 // Push the builtins object found in the current global object.
8003 Result temp = allocator()->Allocate();
8004 ASSERT(temp.is_valid());
8005 __ mov(temp.reg(), GlobalObject());
8006 __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
8007 frame_->Push(&temp);
8008 }
8009
8010 // Push the arguments ("left-to-right").
8011 int arg_count = args->length();
8012 for (int i = 0; i < arg_count; i++) {
8013 Load(args->at(i));
8014 }
8015
8016 if (function == NULL) {
8017 // Call the JS runtime function.
Leon Clarkee46be812010-01-19 14:06:41 +00008018 frame_->Push(node->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00008019 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
8020 arg_count,
8021 loop_nesting_);
8022 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00008023 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00008024 } else {
8025 // Call the C runtime function.
8026 Result answer = frame_->CallRuntime(function, arg_count);
8027 frame_->Push(&answer);
8028 }
8029}
8030
8031
8032void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008033 Comment cmnt(masm_, "[ UnaryOperation");
8034
8035 Token::Value op = node->op();
8036
8037 if (op == Token::NOT) {
8038 // Swap the true and false targets but keep the same actual label
8039 // as the fall through.
8040 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00008041 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00008042 // Swap the labels back.
8043 destination()->Invert();
8044
8045 } else if (op == Token::DELETE) {
8046 Property* property = node->expression()->AsProperty();
8047 if (property != NULL) {
8048 Load(property->obj());
8049 Load(property->key());
8050 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
8051 frame_->Push(&answer);
8052 return;
8053 }
8054
8055 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
8056 if (variable != NULL) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01008057 Slot* slot = variable->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00008058 if (variable->is_global()) {
8059 LoadGlobal();
8060 frame_->Push(variable->name());
8061 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
8062 CALL_FUNCTION, 2);
8063 frame_->Push(&answer);
8064 return;
8065
8066 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
8067 // Call the runtime to look up the context holding the named
8068 // variable. Sync the virtual frame eagerly so we can push the
8069 // arguments directly into place.
8070 frame_->SyncRange(0, frame_->element_count() - 1);
8071 frame_->EmitPush(esi);
8072 frame_->EmitPush(Immediate(variable->name()));
8073 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
8074 ASSERT(context.is_register());
8075 frame_->EmitPush(context.reg());
8076 context.Unuse();
8077 frame_->EmitPush(Immediate(variable->name()));
8078 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
8079 CALL_FUNCTION, 2);
8080 frame_->Push(&answer);
8081 return;
8082 }
8083
8084 // Default: Result of deleting non-global, not dynamically
8085 // introduced variables is false.
8086 frame_->Push(Factory::false_value());
8087
8088 } else {
8089 // Default: Result of deleting expressions is true.
8090 Load(node->expression()); // may have side-effects
8091 frame_->SetElementAt(0, Factory::true_value());
8092 }
8093
8094 } else if (op == Token::TYPEOF) {
8095 // Special case for loading the typeof expression; see comment on
8096 // LoadTypeofExpression().
8097 LoadTypeofExpression(node->expression());
8098 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
8099 frame_->Push(&answer);
8100
8101 } else if (op == Token::VOID) {
8102 Expression* expression = node->expression();
8103 if (expression && expression->AsLiteral() && (
8104 expression->AsLiteral()->IsTrue() ||
8105 expression->AsLiteral()->IsFalse() ||
8106 expression->AsLiteral()->handle()->IsNumber() ||
8107 expression->AsLiteral()->handle()->IsString() ||
8108 expression->AsLiteral()->handle()->IsJSRegExp() ||
8109 expression->AsLiteral()->IsNull())) {
8110 // Omit evaluating the value of the primitive literal.
8111 // It will be discarded anyway, and can have no side effect.
8112 frame_->Push(Factory::undefined_value());
8113 } else {
8114 Load(node->expression());
8115 frame_->SetElementAt(0, Factory::undefined_value());
8116 }
8117
8118 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008119 if (in_safe_int32_mode()) {
8120 Visit(node->expression());
8121 Result value = frame_->Pop();
8122 ASSERT(value.is_untagged_int32());
8123 // Registers containing an int32 value are not multiply used.
8124 ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
8125 value.ToRegister();
8126 switch (op) {
8127 case Token::SUB: {
8128 __ neg(value.reg());
8129 if (node->no_negative_zero()) {
8130 // -MIN_INT is MIN_INT with the overflow flag set.
8131 unsafe_bailout_->Branch(overflow);
8132 } else {
 8133 // MIN_INT and 0 both have bad negations: -0 and -MIN_INT are not
 // representable as int32 values. Both have bits 0..30 clear, so this
 // test catches exactly those two values.
8134 __ test(value.reg(), Immediate(0x7FFFFFFF));
8135 unsafe_bailout_->Branch(zero);
8136 }
8137 break;
8138 }
8139 case Token::BIT_NOT: {
8140 __ not_(value.reg());
8141 break;
8142 }
8143 case Token::ADD: {
8144 // Unary plus has no effect on int32 values.
8145 break;
8146 }
8147 default:
8148 UNREACHABLE();
8149 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008150 }
Steve Block6ded16b2010-05-10 14:33:55 +01008151 frame_->Push(&value);
8152 } else {
8153 Load(node->expression());
Kristian Monsen80d68ea2010-09-08 11:05:35 +01008154 bool can_overwrite = node->expression()->ResultOverwriteAllowed();
Leon Clarkeac952652010-07-15 11:15:24 +01008155 UnaryOverwriteMode overwrite =
8156 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
8157 bool no_negative_zero = node->expression()->no_negative_zero();
Steve Block6ded16b2010-05-10 14:33:55 +01008158 switch (op) {
8159 case Token::NOT:
8160 case Token::DELETE:
8161 case Token::TYPEOF:
8162 UNREACHABLE(); // handled above
8163 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008164
Steve Block6ded16b2010-05-10 14:33:55 +01008165 case Token::SUB: {
Leon Clarkeac952652010-07-15 11:15:24 +01008166 GenericUnaryOpStub stub(
8167 Token::SUB,
8168 overwrite,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01008169 NO_UNARY_FLAGS,
Leon Clarkeac952652010-07-15 11:15:24 +01008170 no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
Steve Block6ded16b2010-05-10 14:33:55 +01008171 Result operand = frame_->Pop();
8172 Result answer = frame_->CallStub(&stub, &operand);
8173 answer.set_type_info(TypeInfo::Number());
8174 frame_->Push(&answer);
8175 break;
8176 }
8177 case Token::BIT_NOT: {
8178 // Smi check.
8179 JumpTarget smi_label;
8180 JumpTarget continue_label;
8181 Result operand = frame_->Pop();
8182 TypeInfo operand_info = operand.type_info();
8183 operand.ToRegister();
8184 if (operand_info.IsSmi()) {
8185 if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
8186 frame_->Spill(operand.reg());
8187 // Set smi tag bit. It will be reset by the not operation.
8188 __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
8189 __ not_(operand.reg());
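            // Worked example: the smi encoding of v is 2*v, so the two
            // instructions above compute ~(2*v + 1) == -2*v - 2 == 2*(-v - 1),
            // which is exactly the smi encoding of ~v (since ~v == -v - 1 in
            // two's complement).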
8190 Result answer = operand;
8191 answer.set_type_info(TypeInfo::Smi());
8192 frame_->Push(&answer);
8193 } else {
8194 __ test(operand.reg(), Immediate(kSmiTagMask));
8195 smi_label.Branch(zero, &operand, taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00008196
Kristian Monsen0d5e1162010-09-30 15:31:59 +01008197 GenericUnaryOpStub stub(Token::BIT_NOT,
8198 overwrite,
8199 NO_UNARY_SMI_CODE_IN_STUB);
Steve Block6ded16b2010-05-10 14:33:55 +01008200 Result answer = frame_->CallStub(&stub, &operand);
8201 continue_label.Jump(&answer);
Leon Clarkee46be812010-01-19 14:06:41 +00008202
Steve Block6ded16b2010-05-10 14:33:55 +01008203 smi_label.Bind(&answer);
8204 answer.ToRegister();
8205 frame_->Spill(answer.reg());
8206 // Set smi tag bit. It will be reset by the not operation.
8207 __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
8208 __ not_(answer.reg());
Leon Clarkee46be812010-01-19 14:06:41 +00008209
Steve Block6ded16b2010-05-10 14:33:55 +01008210 continue_label.Bind(&answer);
8211 answer.set_type_info(TypeInfo::Integer32());
8212 frame_->Push(&answer);
8213 }
8214 break;
8215 }
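        // Illustrative aside (not from the original source): on ia32 a smi
        // is the int32 value shifted left one bit with a zero tag bit, so
        // x = 5 is stored as 10 (0b1010). The 'lea' sets the tag bit
        // (0b1011) and 'not' then yields 0xFFFFFFF4, raw -12, which is the
        // tagged smi for -6 == ~5. Pre-setting the tag bit is what makes
        // the complement come out correctly tagged again.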
        case Token::ADD: {
          // Smi check.
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          __ test(operand.reg(), Immediate(kSmiTagMask));
          continue_label.Branch(zero, &operand, taken);

          frame_->Push(&operand);
          Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
                                                CALL_FUNCTION, 1);

          continue_label.Bind(&answer);
          if (operand_info.IsSmi()) {
            answer.set_type_info(TypeInfo::Smi());
          } else if (operand_info.IsInteger32()) {
            answer.set_type_info(TypeInfo::Integer32());
          } else {
            answer.set_type_info(TypeInfo::Number());
          }
          frame_->Push(&answer);
          break;
        }
        default:
          UNREACHABLE();
      }
    }
  }
}


// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation, call
// into the runtime to convert the argument to a number, and call the
// specialized add or subtract stub. The result is left in dst.
class DeferredPrefixCountOperation: public DeferredCode {
 public:
  DeferredPrefixCountOperation(Register dst,
                               bool is_increment,
                               TypeInfo input_type)
      : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPrefixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
}
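// Illustrative aside (not from the original source): Smi::FromInt(1) is
// the tagged representation of 1 (raw word 2 on ia32), so adding or
// subtracting it directly on a tagged smi increments or decrements the
// represented integer by exactly 1 with no untagging required.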


// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation and call
// into the runtime to convert the argument to a number. Update the
// original value in old. Call the specialized add or subtract stub.
// The result is left in dst.
class DeferredPostfixCountOperation: public DeferredCode {
 public:
  DeferredPostfixCountOperation(Register dst,
                                Register old,
                                bool is_increment,
                                TypeInfo input_type)
      : dst_(dst),
        old_(old),
        is_increment_(is_increment),
        input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  Register old_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPostfixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    __ push(dst_);  // Save the input to use as the old value.
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    __ push(eax);  // Save the result of ToNumber to use as the old value.
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ pop(old_);
}


void CodeGenerator::VisitCountOperation(CountOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CountOperation");

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);

  // Postfix operations need a stack slot under the reference to hold
  // the old value while the new value is being stored. This is so that
  // in the case that storing the new value requires a call, the old
  // value will be in the frame to be spilled.
  if (is_postfix) frame_->Push(Smi::FromInt(0));

  // A constant reference is not saved to, so a constant reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!is_postfix) frame_->Push(Smi::FromInt(0));
      return;
    }
    target.TakeValue();

    Result new_value = frame_->Pop();
    new_value.ToRegister();

    Result old_value;  // Only allocated in the postfix case.
    if (is_postfix) {
      // Allocate a temporary to preserve the old value.
      old_value = allocator_->Allocate();
      ASSERT(old_value.is_valid());
      __ mov(old_value.reg(), new_value.reg());

      // The return value for postfix operations is ToNumber(input).
      // Keep more precise type info if the input is some kind of
      // number already. If the input is not a number we have to wait
      // for the deferred code to convert it.
      if (new_value.type_info().IsNumber()) {
        old_value.set_type_info(new_value.type_info());
      }
    }

    // Ensure the new value is writable.
    frame_->Spill(new_value.reg());

    Result tmp;
    if (new_value.is_smi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
    } else {
      // We don't know statically if the input is a smi.
      // In order to combine the overflow and the smi tag check, we need
      // to be able to allocate a byte register. We attempt to do so
      // without spilling. If we fail, we will generate separate overflow
      // and smi tag checks.
      // We allocate and clear a temporary byte register before performing
      // the count operation since clearing the register using xor will clear
      // the overflow flag.
      tmp = allocator_->AllocateByteRegisterWithoutSpilling();
      if (tmp.is_valid()) {
        __ Set(tmp.reg(), Immediate(0));
      }
    }

    if (is_increment) {
      __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    } else {
      __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    }

    DeferredCode* deferred = NULL;
    if (is_postfix) {
      deferred = new DeferredPostfixCountOperation(new_value.reg(),
                                                   old_value.reg(),
                                                   is_increment,
                                                   new_value.type_info());
    } else {
      deferred = new DeferredPrefixCountOperation(new_value.reg(),
                                                  is_increment,
                                                  new_value.type_info());
    }

    if (new_value.is_smi()) {
      // In case we have a smi as input just check for overflow.
      deferred->Branch(overflow);
    } else {
      // If the count operation didn't overflow and the result is a valid
      // smi, we're done. Otherwise, we jump to the deferred slow-case
      // code.
      // We combine the overflow and the smi tag check if we could
      // successfully allocate a temporary byte register.
      if (tmp.is_valid()) {
        __ setcc(overflow, tmp.reg());
        __ or_(Operand(tmp.reg()), new_value.reg());
        __ test(tmp.reg(), Immediate(kSmiTagMask));
        tmp.Unuse();
        deferred->Branch(not_zero);
      } else {
        // Otherwise we test separately for overflow and smi tag.
        deferred->Branch(overflow);
        __ test(new_value.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      }
    }
    deferred->BindExit();

    // Postfix count operations return their input converted to
    // number. The case when the input is already a number is covered
    // above in the allocation code for old_value.
    if (is_postfix && !new_value.type_info().IsNumber()) {
      old_value.set_type_info(TypeInfo::Number());
    }

    // The result of ++ or -- is an Integer32 if the
    // input is a smi. Otherwise it is a number.
    if (new_value.is_smi()) {
      new_value.set_type_info(TypeInfo::Integer32());
    } else {
      new_value.set_type_info(TypeInfo::Number());
    }

    // Postfix: store the old value in the allocated slot under the
    // reference.
    if (is_postfix) frame_->SetElementAt(target.size(), &old_value);

    frame_->Push(&new_value);
    // Non-constant: update the reference.
    if (!is_const) target.SetValue(NOT_CONST_INIT);
  }

  // Postfix: drop the new value and use the old.
  if (is_postfix) frame_->Drop();
}
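// Illustrative aside (not from the original source): for a postfix
// expression such as 'a[f()]++', the old value must survive a store of
// the new value that may itself call out (for example, a keyed store
// that misses its inline cache). Reserving a frame slot under the
// reference up front gives the old value a spillable home across any
// such call.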


void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
  Token::Value op = node->op();
  Comment cmnt(masm_, "[ Int32BinaryOperation");
  ASSERT(in_safe_int32_mode());
  ASSERT(safe_int32_mode_enabled());
  ASSERT(FLAG_safe_int32_compiler);

  if (op == Token::COMMA) {
    // Discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  ASSERT(right.is_untagged_int32());
  ASSERT(left.is_untagged_int32());
  // Registers containing an int32 value are not multiply used.
  ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
  ASSERT(!right.is_register() || !frame_->is_used(right.reg()));

  switch (op) {
    case Token::COMMA:
    case Token::OR:
    case Token::AND:
      UNREACHABLE();
      break;
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
      if (left.is_constant() || right.is_constant()) {
        int32_t value;  // Put the constant in value, the non-constant in left.
        // Constants are known to be int32 values, from static analysis,
        // or else will be converted to int32 by the implicit ECMA [[ToInt32]].
        if (left.is_constant()) {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        } else {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        }

        left.ToRegister();
        if (op == Token::BIT_OR) {
          __ or_(Operand(left.reg()), Immediate(value));
        } else if (op == Token::BIT_XOR) {
          __ xor_(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(Operand(left.reg()), Immediate(value));
        }
      } else {
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::BIT_OR) {
          __ or_(left.reg(), Operand(right.reg()));
        } else if (op == Token::BIT_XOR) {
          __ xor_(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(left.reg(), Operand(right.reg()));
        }
      }
      frame_->Push(&left);
      right.Unuse();
      break;
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      bool test_shr_overflow = false;
      left.ToRegister();
      if (right.is_constant()) {
        ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
        int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
        if (op == Token::SAR) {
          __ sar(left.reg(), shift_amount);
        } else if (op == Token::SHL) {
          __ shl(left.reg(), shift_amount);
        } else {
          ASSERT(op == Token::SHR);
          __ shr(left.reg(), shift_amount);
          if (shift_amount == 0) test_shr_overflow = true;
        }
      } else {
        // Move right to ecx.
        if (left.is_register() && left.reg().is(ecx)) {
          right.ToRegister();
          __ xchg(left.reg(), right.reg());
          left = right;  // Left is unused here; the copy of right is
                         // unused by Push.
        } else {
          right.ToRegister(ecx);
          left.ToRegister();
        }
        if (op == Token::SAR) {
          __ sar_cl(left.reg());
        } else if (op == Token::SHL) {
          __ shl_cl(left.reg());
        } else {
          ASSERT(op == Token::SHR);
          __ shr_cl(left.reg());
          test_shr_overflow = true;
        }
      }
      {
        Register left_reg = left.reg();
        frame_->Push(&left);
        right.Unuse();
        if (test_shr_overflow && !node->to_int32()) {
          // Uint32 results with the top bit set are not Int32 values.
          // If they will be forced to Int32, skip the test.
          // The test is needed because 'shr' with a shift amount of 0
          // does not set the flags.
          __ test(left_reg, Operand(left_reg));
          unsafe_bailout_->Branch(sign);
        }
      }
      break;
    }
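    // Illustrative aside (not from the original source): in JavaScript,
    // '>>>' produces a uint32, so '0xFFFFFFFF >>> 0' is 4294967295, which
    // does not fit in an int32 and forces a bailout here. When the result
    // feeds directly into a ToInt32 context, as in
    // '(0xFFFFFFFF >>> 0) | 0' == -1, the wrap is harmless and the sign
    // test is skipped.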
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
      if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
        int32_t value;  // Put the constant in value, the non-constant in left.
        if (right.is_constant()) {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        } else {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        }

        left.ToRegister();
        if (op == Token::ADD) {
          __ add(Operand(left.reg()), Immediate(value));
        } else if (op == Token::SUB) {
          __ sub(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::MUL);
          __ imul(left.reg(), left.reg(), value);
        }
      } else {
        left.ToRegister();
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::ADD) {
          __ add(left.reg(), Operand(right.reg()));
        } else if (op == Token::SUB) {
          __ sub(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::MUL);
          // We have statically verified that a negative zero can be ignored.
          __ imul(left.reg(), Operand(right.reg()));
        }
      }
      right.Unuse();
      frame_->Push(&left);
      if (!node->to_int32()) {
        // If ToInt32 is called on the result of ADD, SUB, or MUL, we don't
        // care about overflows; otherwise an overflow invalidates the
        // int32 result and we bail out.
        unsafe_bailout_->Branch(overflow);
      }
      break;
    case Token::DIV:
    case Token::MOD: {
      if (right.is_register() &&
          (right.reg().is(eax) || right.reg().is(edx))) {
        if (left.is_register() && left.reg().is(edi)) {
          right.ToRegister(ebx);
        } else {
          right.ToRegister(edi);
        }
      }
      left.ToRegister(eax);
      Result edx_reg = allocator_->Allocate(edx);
      right.ToRegister();
      // The results are unused here because BreakTarget::Branch cannot
      // handle live results.
      Register right_reg = right.reg();
      left.Unuse();
      right.Unuse();
      edx_reg.Unuse();
      __ cmp(right_reg, 0);
      // Ensure the divisor is positive: no chance of a non-int32 or -0
      // result.
      unsafe_bailout_->Branch(less_equal);
      __ cdq();  // Sign-extend eax into edx:eax.
      __ idiv(right_reg);
      if (op == Token::MOD) {
        // Negative zero can arise as a negative dividend with a zero result.
        if (!node->no_negative_zero()) {
          Label not_negative_zero;
          __ test(edx, Operand(edx));
          __ j(not_zero, &not_negative_zero);
          __ test(eax, Operand(eax));
          unsafe_bailout_->Branch(negative);
          __ bind(&not_negative_zero);
        }
        Result edx_result(edx, TypeInfo::Integer32());
        edx_result.set_untagged_int32(true);
        frame_->Push(&edx_result);
      } else {
        ASSERT(op == Token::DIV);
        __ test(edx, Operand(edx));
        unsafe_bailout_->Branch(not_equal);
        Result eax_result(eax, TypeInfo::Integer32());
        eax_result.set_untagged_int32(true);
        frame_->Push(&eax_result);
      }
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
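// Illustrative aside (not from the original source): the DIV/MOD fast
// path above only accepts positive divisors and exact int32 results.
// For example, 7 / 2 leaves the remainder 1 in edx and bails out (the
// JavaScript result is 3.5), and -1 % 1 must produce -0, a heap number,
// which the negative-dividend/zero-remainder check detects.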


void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions. This means that the value
  // produced by a && or || operator is not necessarily a boolean.

  // NOTE: If the left hand side produces a materialized value (not
  // control flow), we force the right hand side to do the same. This
  // is necessary because we assume that if we get control flow on the
  // last path out of an expression we got it on all paths.
  if (node->op() == Token::AND) {
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_true;
    ControlDestination dest(&is_true, destination()->false_target(), true);
    LoadCondition(node->left(), &dest, false);

    if (dest.false_was_fall_through()) {
      // The current false target was used as the fall-through. If
      // there are no dangling jumps to is_true then the left
      // subexpression was unconditionally false. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_true.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current false target was a forward jump then we have a
        // valid frame, we have just bound the false target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->false_target()->Unuse();
          destination()->false_target()->Jump();
        }
        is_true.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have actually just jumped to or bound the current false
        // target but the current control destination is not marked as
        // used.
        destination()->Use(false);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_true
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_true
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&pop_and_continue, &exit, true);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else {
    ASSERT(node->op() == Token::OR);
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_false;
    ControlDestination dest(destination()->true_target(), &is_false, false);
    LoadCondition(node->left(), &dest, false);

    if (dest.true_was_fall_through()) {
      // The current true target was used as the fall-through. If
      // there are no dangling jumps to is_false then the left
      // subexpression was unconditionally true. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_false.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current true target was a forward jump then we have a
        // valid frame, we have just bound the true target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->true_target()->Unuse();
          destination()->true_target()->Jump();
        }
        is_false.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have just jumped to or bound the current true target but
        // the current control destination is not marked as used.
        destination()->Use(true);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_false
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_false
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&exit, &pop_and_continue, false);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }
  }
}
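// Illustrative aside (not from the original source): because '&&' and
// '||' yield an operand value rather than a boolean, '0 || "x"'
// evaluates to "x" and '1 && 2' evaluates to 2. That is why, when a
// materialized value is produced, the code above duplicates the
// top-of-stack for the ToBoolean test instead of consuming it.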


void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else if (in_safe_int32_mode()) {
    Visit(node->left());
    Visit(node->right());
    Int32BinaryOperation(node);
  } else {
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
    OverwriteMode overwrite_mode = NO_OVERWRITE;
    if (node->left()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_LEFT;
    } else if (node->right()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_RIGHT;
    }

    if (node->left()->IsTrivial()) {
      Load(node->right());
      Result right = frame_->Pop();
      frame_->Push(node->left());
      frame_->Push(&right);
    } else {
      Load(node->left());
      Load(node->right());
    }
    GenericBinaryOperation(node, overwrite_mode);
  }
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
  ASSERT(!in_safe_int32_mode());
  frame_->PushFunction();
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareOperation");

  bool left_already_loaded = false;

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();
  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Result answer = frame_->Pop();
    answer.ToRegister();

    if (check->Equals(Heap::number_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ cmp(answer.reg(), Factory::heap_number_map());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::string_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable string object.
      Result temp = allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
      temp.Unuse();
      answer.Unuse();
      destination()->Split(below);

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ cmp(answer.reg(), Factory::true_value());
      destination()->true_target()->Branch(equal);
      __ cmp(answer.reg(), Factory::false_value());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ cmp(answer.reg(), Factory::undefined_value());
      destination()->true_target()->Branch(equal);

      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable object.
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      answer.Unuse();
      destination()->Split(not_zero);

    } else if (check->Equals(Heap::function_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
      destination()->true_target()->Branch(equal);
      // Regular expressions are callable so typeof == 'function'.
      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
      answer.Unuse();
      destination()->Split(equal);
    } else if (check->Equals(Heap::object_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      __ cmp(answer.reg(), Factory::null_value());
      destination()->true_target()->Branch(equal);

      Result map = allocator()->Allocate();
      ASSERT(map.is_valid());
      // Regular expressions are typeof == 'function', not 'object'.
      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
      destination()->false_target()->Branch(equal);

      // It can be an undetectable object.
      __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      // Do a range test for JSObject type. We can't use
      // MacroAssembler::IsInstanceJSObjectType, because we are using a
      // ControlDestination, so we copy its implementation here.
      __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
      __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
      __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
      answer.Unuse();
      map.Unuse();
      destination()->Split(below_equal);
    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      answer.Unuse();
      destination()->Goto(false);
    }
    return;
  } else if (op == Token::LT &&
             right->AsLiteral() != NULL &&
             right->AsLiteral()->handle()->IsHeapNumber()) {
    Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
    if (check->value() == 2147483648.0) {  // 0x80000000.
      Load(left);
      left_already_loaded = true;
      Result lhs = frame_->Pop();
      lhs.ToRegister();
      __ test(lhs.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);  // All smis are less.
      Result scratch = allocator()->Allocate();
      ASSERT(scratch.is_valid());
      __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
      __ cmp(scratch.reg(), Factory::heap_number_map());
      JumpTarget not_a_number;
      not_a_number.Branch(not_equal, &lhs);
      __ mov(scratch.reg(),
             FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
      __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
      not_a_number.Branch(above_equal, &lhs);  // It's a negative NaN or -Inf.
      const uint32_t borderline_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
      scratch.Unuse();
      lhs.Unuse();
      destination()->true_target()->Branch(less);
      destination()->false_target()->Jump();

      not_a_number.Bind(&lhs);
      frame_->Push(&lhs);
    }
  }

  Condition cc = no_condition;
  bool strict = false;
  switch (op) {
    case Token::EQ_STRICT:
      strict = true;
      // Fall through.
    case Token::EQ:
      cc = equal;
      break;
    case Token::LT:
      cc = less;
      break;
    case Token::GT:
      cc = greater;
      break;
    case Token::LTE:
      cc = less_equal;
      break;
    case Token::GTE:
      cc = greater_equal;
      break;
    case Token::IN: {
      if (!left_already_loaded) Load(left);
      Load(right);
      Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
      frame_->Push(&answer);  // Push the result.
      return;
    }
    case Token::INSTANCEOF: {
      if (!left_already_loaded) Load(left);
      Load(right);
      InstanceofStub stub;
      Result answer = frame_->CallStub(&stub, 2);
      answer.ToRegister();
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      destination()->Split(zero);
      return;
    }
    default:
      UNREACHABLE();
  }

  if (left->IsTrivial()) {
    if (!left_already_loaded) {
      Load(right);
      Result right_result = frame_->Pop();
      frame_->Push(left);
      frame_->Push(&right_result);
    } else {
      Load(right);
    }
  } else {
    if (!left_already_loaded) Load(left);
    Load(right);
  }
  Comparison(node, cc, strict, destination());
}
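// Illustrative aside (not from the original source): the two fast paths
// above target common guard idioms. 'typeof v == "number"' and its
// siblings compile to direct map and instance-type checks without
// materializing the typeof string, and 'x < 2147483648' (an int32 range
// guard; 2^31) is answered for smis immediately, since every smi fits
// in 31 bits.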


void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Result operand = frame_->Pop();
  operand.ToRegister();
  __ cmp(operand.reg(), Factory::null_value());
  if (node->is_strict()) {
    operand.Unuse();
    destination()->Split(equal);
  } else {
    // The 'null' value is only equal to 'undefined' if using non-strict
    // comparisons.
    destination()->true_target()->Branch(equal);
    __ cmp(operand.reg(), Factory::undefined_value());
    destination()->true_target()->Branch(equal);
    __ test(operand.reg(), Immediate(kSmiTagMask));
    destination()->false_target()->Branch(equal);

    // It can be an undetectable object.
    // Use a scratch register in preference to spilling operand.reg().
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(),
           FieldOperand(operand.reg(), HeapObject::kMapOffset));
    __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    temp.Unuse();
    operand.Unuse();
    destination()->Split(not_zero);
  }
}
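// Illustrative aside (not from the original source): this mirrors the
// JavaScript semantics of 'x == null', which holds exactly for null,
// undefined, and undetectable objects (e.g. document.all in browsers),
// whereas the strict form 'x === null' matches null alone.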


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
      && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
      && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
      && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
      && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
}
#endif


// Emit a LoadIC call to get the value from receiver and leave it in
// dst.
class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  DeferredReferenceGetNamedValue(Register dst,
                                 Register receiver,
                                 Handle<String> name,
                                 bool is_contextual)
      : dst_(dst),
        receiver_(receiver),
        name_(name),
        is_contextual_(is_contextual),
        is_dont_delete_(false) {
    set_comment(is_contextual
                ? "[ DeferredReferenceGetNamedValue (contextual)"
                : "[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

  void set_is_dont_delete(bool value) {
    ASSERT(is_contextual_);
    is_dont_delete_ = value;
  }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Handle<String> name_;
  bool is_contextual_;
  bool is_dont_delete_;
};


void DeferredReferenceGetNamedValue::Generate() {
  if (!receiver_.is(eax)) {
    __ mov(eax, receiver_);
  }
  __ Set(ecx, Immediate(name_));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  RelocInfo::Mode mode = is_contextual_
      ? RelocInfo::CODE_TARGET_CONTEXT
      : RelocInfo::CODE_TARGET;
  __ call(ic, mode);
  // The call must be followed by:
  // - a test eax instruction to indicate that the inobject property
  //   case was inlined.
  // - a mov ecx or mov edx instruction to indicate that the
  //   contextual property load was inlined.
  //
  // Store the delta to the map check instruction here in the test
  // instruction. Use masm_-> instead of the __ macro since the
  // latter can't return a value.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  if (is_contextual_) {
    masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
    __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
    if (is_dont_delete_) {
      __ IncrementCounter(&Counters::dont_delete_hint_miss, 1);
    }
  } else {
    masm_->test(eax, Immediate(-delta_to_patch_site));
    __ IncrementCounter(&Counters::named_load_inline_miss, 1);
  }

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceGetKeyedValue(Register dst,
                                 Register receiver,
                                 Register key)
      : dst_(dst), receiver_(receiver), key_(key) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  if (!receiver_.is(eax)) {
    // Register eax is available for key.
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (!key_.is(edx)) {
    // Register edx is available for receiver.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
  } else {
    __ xchg(edx, eax);
  }
  // Calculate the delta from the IC call instruction to the map check
  // cmp instruction in the inlined version. This delta is stored in
  // a test(eax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the cmp instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
                                 Register scratch)
      : value_(value),
        key_(key),
        receiver_(receiver),
        scratch_(scratch) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Register scratch_;
  Label patch_site_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
  // Move value_ to eax, key_ to ecx, and receiver_ to edx.
  Register old_value = value_;

  // First, move value to eax.
  if (!value_.is(eax)) {
    if (key_.is(eax)) {
      // Move key_ out of eax, preferably to ecx.
      if (!value_.is(ecx) && !receiver_.is(ecx)) {
        __ mov(ecx, key_);
        key_ = ecx;
      } else {
        __ mov(scratch_, key_);
        key_ = scratch_;
      }
    }
    if (receiver_.is(eax)) {
      // Move receiver_ out of eax, preferably to edx.
      if (!value_.is(edx) && !key_.is(edx)) {
        __ mov(edx, receiver_);
        receiver_ = edx;
      } else {
        // Both moves to scratch are from eax; also, no valid path hits both.
        __ mov(scratch_, receiver_);
        receiver_ = scratch_;
      }
    }
    __ mov(eax, value_);
    value_ = eax;
  }

  // Now value_ is in eax. Move the other two to the right positions.
  // We do not update the variables key_ and receiver_ to ecx and edx.
  if (key_.is(ecx)) {
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (key_.is(edx)) {
    if (receiver_.is(ecx)) {
      __ xchg(edx, ecx);
    } else {
      __ mov(ecx, key_);
      if (!receiver_.is(edx)) {
        __ mov(edx, receiver_);
      }
    }
  } else {  // Key is not in edx or ecx.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    __ mov(ecx, key_);
  }

  // Call the IC stub.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  // Restore the value (returned from the store IC) register.
  if (!old_value.is(eax)) __ mov(old_value, eax);
}
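// Illustrative aside (not from the original source): the shuffle above
// establishes what the store IC is called with here (value in eax, key
// in ecx, receiver in edx), while coping with every possible initial
// assignment of the three registers without clobbering any of them
// before it has been read.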


Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif

  bool contextual_load_in_builtin =
      is_contextual &&
      (Bootstrapper::IsActive() ||
       (!info_->closure().is_null() && info_->closure()->IsBuiltin()));

  Result result;
  // Do not inline in global code or when not inside a loop.
  if (scope()->is_global_scope() ||
      loop_nesting() == 0 ||
      contextual_load_in_builtin) {
    Comment cmnt(masm(), "[ Load from named Property");
    frame()->Push(name);

    RelocInfo::Mode mode = is_contextual
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    result = frame()->CallLoadIC(mode);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined. Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the property load.
    Comment cmnt(masm(), is_contextual
                 ? "[ Inlined contextual property load"
                 : "[ Inlined named property load");
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    result = allocator()->Allocate();
    ASSERT(result.is_valid());
    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(result.reg(),
                                           receiver.reg(),
                                           name,
                                           is_contextual);

    if (!is_contextual) {
      // Check that the receiver is a heap object.
      __ test(receiver.reg(), Immediate(kSmiTagMask));
      deferred->Branch(zero);
    }

    __ bind(deferred->patch_site());
    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    deferred->Branch(not_equal);

    // The delta from the patch label to the actual load must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
           LoadIC::kOffsetToLoadInstruction);

    if (is_contextual) {
      // Load the (initially invalid) cell and get its value.
      masm()->mov(result.reg(), Factory::null_value());
      if (FLAG_debug_code) {
        __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
               Factory::global_property_cell_map());
        __ Assert(equal, "Uninitialized inlined contextual load");
      }
      __ mov(result.reg(),
             FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
      bool is_dont_delete = false;
      if (!info_->closure().is_null()) {
        // When doing lazy compilation we can check if the global cell
        // already exists and use its "don't delete" status as a hint.
        AssertNoAllocation no_gc;
        v8::internal::GlobalObject* global_object =
            info_->closure()->context()->global();
        LookupResult lookup;
        global_object->LocalLookupRealNamedProperty(*name, &lookup);
        if (lookup.IsProperty() && lookup.type() == NORMAL) {
          ASSERT(lookup.holder() == global_object);
          ASSERT(global_object->property_dictionary()->ValueAt(
              lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
          is_dont_delete = lookup.IsDontDelete();
        }
      }
      deferred->set_is_dont_delete(is_dont_delete);
      if (!is_dont_delete) {
        __ cmp(result.reg(), Factory::the_hole_value());
        deferred->Branch(equal);
      } else if (FLAG_debug_code) {
        __ cmp(result.reg(), Factory::the_hole_value());
        __ Check(not_equal, "DontDelete cells can't contain the hole");
      }
      __ IncrementCounter(&Counters::named_load_global_inline, 1);
      if (is_dont_delete) {
        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1);
      }
    } else {
      // The initial (invalid) offset has to be large enough to force a 32-bit
      // instruction encoding to allow patching with an arbitrary offset. Use
      // kMaxInt (minus kHeapObjectTag).
      int offset = kMaxInt;
      masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
      __ IncrementCounter(&Counters::named_load_inline, 1);
    }

    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}
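// Illustrative aside (not from the original source): a global property
// whose cell is marked DONT_DELETE can never be removed, so its cell
// can never revert to the hole. The hint above exploits that: when lazy
// compilation can see the existing cell, the inlined contextual load
// drops the hole check entirely, and only debug builds re-verify it.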


Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    result = frame()->CallStoreIC(name, is_contextual);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined. Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;
    Label patch_site;

    // Get the value and receiver from the stack.
    Result value = frame()->Pop();
    value.ToRegister();
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    // Allocate the result register.
    result = allocator()->Allocate();
    ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    slow.Branch(zero, &value, &receiver);

    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    __ bind(&patch_site);
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    slow.Branch(not_equal, &value, &receiver);

    // The delta from the patch label to the store offset must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
           StoreIC::kOffsetToStoreInstruction);

    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset. Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    __ mov(FieldOperand(receiver.reg(), offset), value.reg());
    __ mov(result.reg(), Operand(value.reg()));

    // Allocate a scratch register for the write barrier.
    Result scratch = allocator()->Allocate();
    ASSERT(scratch.is_valid());

    // The write barrier clobbers all input registers, so spill the
    // receiver and the value.
    frame_->Spill(receiver.reg());
    frame_->Spill(value.reg());

    // If the receiver and the value share a register, allocate a new
    // register for the receiver.
    if (receiver.reg().is(value.reg())) {
      receiver = allocator()->Allocate();
      ASSERT(receiver.is_valid());
      __ mov(receiver.reg(), Operand(value.reg()));
    }

    // Update the write barrier. To save instructions in the inlined
    // version we do not filter smis.
    Label skip_write_barrier;
    __ InNewSpace(receiver.reg(), value.reg(), equal, &skip_write_barrier);
    int delta_to_record_write = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ lea(scratch.reg(), Operand(receiver.reg(), offset));
    __ RecordWriteHelper(receiver.reg(), scratch.reg(), value.reg());
    if (FLAG_debug_code) {
      __ mov(receiver.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(value.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(scratch.reg(), Immediate(BitCast<int32_t>(kZapValue)));
    }
    __ bind(&skip_write_barrier);
    value.Unuse();
    scratch.Unuse();
    receiver.Unuse();
    done.Jump(&result);

    slow.Bind(&value, &receiver);
    frame()->Push(&receiver);
    frame()->Push(&value);
    result = frame()->CallStoreIC(name, is_contextual);
    // Encode the offset to the map check instruction and the offset
    // to the write barrier store address computation in a test eax
    // instruction.
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ test(eax,
            Immediate((delta_to_record_write << 16) | delta_to_patch_site));
    done.Bind(&result);
  }

  ASSERT_EQ(expected_height, frame()->height());
  return result;
}
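// Illustrative aside (not from the original source): the final
// 'test eax, imm' packs two code offsets into one 32-bit immediate,
// the delta to the write-barrier address computation in the upper 16
// bits and the delta to the map check in the lower 16 bits, so the
// store IC patching code can find both sites from a single marker.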
9598
9599
9600Result CodeGenerator::EmitKeyedLoad() {
9601#ifdef DEBUG
9602 int original_height = frame()->height();
9603#endif
9604 Result result;
9605 // Inline array load code if inside of a loop. We do not know the
9606 // receiver map yet, so we initially generate the code with a check
9607 // against an invalid map. In the inline cache code, we patch the map
9608 // check if appropriate.
  if (loop_nesting() > 0) {
    Comment cmnt(masm_, "[ Inlined load from keyed Property");

    // Use a fresh temporary to load the elements without destroying
    // the receiver which is needed for the deferred slow case.
    Result elements = allocator()->Allocate();
    ASSERT(elements.is_valid());

    Result key = frame_->Pop();
    Result receiver = frame_->Pop();
    key.ToRegister();
    receiver.ToRegister();

    // If key and receiver are shared registers on the frame, their values
    // will be automatically saved and restored when going to deferred code.
    // The result is in elements, which is guaranteed non-shared.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(elements.reg(),
                                           receiver.reg(),
                                           key.reg());

    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the receiver has the expected map.
    // Initially, use an invalid map. The map is patched in the IC
    // initialization code.
    __ bind(deferred->patch_site());
    // Use masm-> here instead of the double underscore macro since extra
    // coverage code can interfere with the patching.
    masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
               Immediate(Factory::null_value()));
    deferred->Branch(not_equal);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Get the elements array from the receiver.
    __ mov(elements.reg(),
           FieldOperand(receiver.reg(), JSObject::kElementsOffset));
    __ AssertFastElements(elements.reg());

    // Check that the key is within bounds.
    __ cmp(key.reg(),
           FieldOperand(elements.reg(), FixedArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Load and check that the result is not the hole.
    // Key holds a smi.
    STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
    __ mov(elements.reg(),
           FieldOperand(elements.reg(),
                        key.reg(),
                        times_2,
                        FixedArray::kHeaderSize));
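    // A smi is the untagged value shifted left by kSmiTagSize (1), so
    // scaling the smi key with times_2 yields value * 4, which is exactly
    // the byte offset of a pointer-sized element on ia32.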
    result = elements;
    __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
    deferred->Branch(equal);
    __ IncrementCounter(&Counters::keyed_load_inline, 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load. The explicit nop instruction is here because
    // the push that follows might be peephole-optimized away.
    __ nop();
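    // Conversely, the nop guarantees that the byte after the call is not a
    // test opcode, so the inline cache treats this site as having no
    // inline fast case to patch.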
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}


Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Generate an inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    Comment cmnt(masm(), "[ Inlined store to keyed Property");

    // Get the receiver, key and value into registers.
    result = frame()->Pop();
    Result key = frame()->Pop();
    Result receiver = frame()->Pop();

    Result tmp = allocator_->Allocate();
    ASSERT(tmp.is_valid());
    Result tmp2 = allocator_->Allocate();
    ASSERT(tmp2.is_valid());

    // Determine whether the value is a constant before putting it in a
    // register.
    bool value_is_constant = result.is_constant();

    // Make sure that value, key and receiver are in registers.
    result.ToRegister();
    key.ToRegister();
    receiver.ToRegister();

    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(result.reg(),
                                           key.reg(),
                                           receiver.reg(),
                                           tmp.reg());

    // Check that the receiver is not a smi.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Check that the receiver is a JSArray.
    __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
    deferred->Branch(not_equal);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ cmp(key.reg(),
           FieldOperand(receiver.reg(), JSArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Get the elements array from the receiver and check that it is not a
    // dictionary.
    __ mov(tmp.reg(),
           FieldOperand(receiver.reg(), JSArray::kElementsOffset));

    // Check whether it is possible to omit the write barrier. If the
    // elements array is in new space or the value written is a smi, we can
    // safely update the elements array without a write barrier.
    Label in_new_space;
    __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
    if (!value_is_constant) {
      __ test(result.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    }
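    // The write barrier only has to record pointers from old space into new
    // space. A store into a new-space elements array can never create such
    // a pointer, and a smi value is not a pointer at all, so both cases can
    // safely skip the barrier.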

    __ bind(&in_new_space);
    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison. When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    deferred->Branch(not_equal);

    // Store the value.
    __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
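    // FixedArrayElementOperand presumably computes the element address with
    // the same smi scaling (times_2 plus FixedArray::kHeaderSize) used in
    // EmitKeyedLoad above.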
    __ IncrementCounter(&Counters::keyed_store_inline, 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC();
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 3);
  return result;
}


#undef __
#define __ ACCESS_MASM(masm)


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>::cast(raw_name->handle());
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) cgen_->frame()->Dup();
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
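      // With [..., receiver, key] on the frame, the first PushElementAt(1)
      // pushes a copy of receiver, and the second pushes the element now at
      // depth 1, which is key. This leaves a duplicate receiver/key pair so
      // the reference survives the load.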
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::TakeValue() {
  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot. For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can
  // reach here. Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


#undef __

#define __ masm.

MemCopyFunction CreateMemCopyFunction() {
  size_t actual_size;
  byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
                                                 &actual_size,
                                                 true));
  CHECK(buffer);
  HandleScope handles;
  MacroAssembler masm(buffer, static_cast<int>(actual_size));

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on the stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: Return address.

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  int stack_offset = 0;  // Update if we change the stack height.

  if (FLAG_debug_code) {
    __ cmp(Operand(esp, kSizeOffset + stack_offset),
           Immediate(kMinComplexMemCopy));
    Label ok;
    __ j(greater_equal, &ok);
    __ int3();
    __ bind(&ok);
  }
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope enable(SSE2);
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(Operand(edx), Immediate(16));
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
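    // The movdqu pair above copied the first 16 bytes unconditionally; dst
    // is then advanced to the next 16-byte boundary (by a full 16 if it was
    // already aligned), so up to 15 bytes may be copied twice when dst was
    // unaligned. That is harmless because the source and destination are
    // assumed disjoint.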

    // edi is now aligned. Check if esi is also aligned.
    Label unaligned_source;
    __ test(Operand(src), Immediate(0x0F));
    __ j(not_zero, &unaligned_source);
    {
      __ IncrementCounter(&Counters::memcopy_aligned, 1);
      // Copy loop for aligned source and destination.
      __ mov(edx, count);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
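      // Each iteration of the main loop moves 32 bytes, so the loop runs
      // count / 32 times; the remaining 0-31 bytes are handled after it.
      // Note that loop_count reuses ecx while count now lives in edx.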
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
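        // Hint the CPU to start fetching the next 32-byte chunk of the
        // source while the current chunk is loaded and stored.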
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqa(xmm0, Operand(src, 0x00));
        __ movdqa(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqa(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at the end of the region.
      __ and_(count, 0xF);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
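      // The unaligned pair above copies the final 16 bytes ending exactly
      // at the end of the region, re-copying a few bytes if necessary. This
      // relies on the total size being at least 16, which the
      // kMinComplexMemCopy check at the top is presumably meant to ensure.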

      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }
    __ Align(16);
    {
      // Copy loop for unaligned source and aligned destination.
      // If the source is not aligned, we can't read it as efficiently.
      __ bind(&unaligned_source);
      __ IncrementCounter(&Counters::memcopy_unaligned, 1);
      __ mov(edx, ecx);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
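        // The source may be unaligned, so load with movdqu; the stores can
        // still use movdqa because dst was aligned to 16 bytes above.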
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqu(xmm0, Operand(src, 0x00));
        __ movdqu(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqu(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at the end of the region.
      __ and_(count, 0x0F);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);

      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }

  } else {
    __ IncrementCounter(&Counters::memcopy_noxmm, 1);
    // SSE2 not supported. Unlikely to happen in practice.
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    __ cld();
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    // Copy the first word.
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);

    // Increment src and dst so that dst is aligned.
    __ mov(edx, dst);
    __ and_(edx, 0x03);
    __ neg(edx);
    __ add(Operand(edx), Immediate(4));  // edx = 4 - (dst & 3)
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
    // edi is now aligned, ecx holds the number of remaining bytes to copy.

    __ mov(edx, count);
    count = edx;
    __ shr(ecx, 2);  // Make word count instead of byte count.
    __ rep_movs();
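    // rep movs copies ecx doublewords from [esi] to [edi], advancing both
    // pointers as it goes; the cld above cleared the direction flag, so the
    // copy runs forward.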

    // At most 3 bytes left to copy. Copy 4 bytes at the end of the region.
    __ and_(count, 3);
    __ mov(eax, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, count, times_1, -4), eax);

    __ pop(esi);
    __ pop(edi);
    __ ret(0);
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  // The generated function is called directly from C++.
  return FUNCTION_CAST<MemCopyFunction>(buffer);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32