// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

// -------------------------------------------------------------------------
// Platform-specific FrameRegisterState functions.

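// A note on the encoding used below: registers_[i] holds kPush when the
// register's value lives only in the register and must be pushed, kIgnore
// when there is nothing to save, and otherwise an ebp-relative offset of
// the register's frame slot, with kSyncedFlag set when the slot already
// holds the value (so Save can skip the store).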
void FrameRegisterState::Save(MacroAssembler* masm) const {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void FrameRegisterState::Restore(MacroAssembler* masm) const {
  // Restore registers in the reverse order of Save: pushed values
  // must be popped in LIFO order.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


#undef __
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  frame_state_.Save(masm_);
}


void DeferredCode::RestoreRegisters() {
  frame_state_.Restore(masm_);
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->Save(masm);
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  frame_state_->Restore(masm);
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

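// Generate emits the full body of a JS function: it enters the frame,
// allocates stack slots and (if needed) a heap-allocated local context,
// copies context parameters into that context, stores the arguments
// object, processes declarations, and finally visits the function body
// and emits the return sequence(s).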
void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->loop_nesting();

  JumpTarget::set_compiling_deferred_code(false);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    frame_->SpillAll();
    __ int3();
  }
#endif

  // New scope to get automatic timing calculation.
  { HistogramTimerScope codegen_timer(&Counters::code_generation);
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();

    // Allocate the local context if needed.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and esi agree.
      if (FLAG_debug_code) {
        __ cmp(context.reg(), Operand(esi));
        __ Assert(equal, "Runtime::NewContext should end up in esi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->slot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope()->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ mov(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object. This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->Push(Factory::the_hole_value());
      StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
    }


    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->loop_nesting());
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator; it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // do not overwrite context register
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we
      // are always at a function context. However, it is safe to
      // dereference because the function context of a function context
      // is itself. Before deleting this mov we should try to create a
      // counter-example first, though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}


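// Like SlotOperand, but for a context slot reached through a chain of
// enclosing scopes, any of which may have been extended by eval. Every
// potentially extended context on the way (and the final one) is checked
// for a NULL extension; if an extension is present, the slot cannot be
// accessed directly and control branches to |slow| instead.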
Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame. If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}



void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}


void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
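    // Adding the register to itself shifts the value left one bit, which
    // is exactly Smi tagging on ia32 (kSmiTag == 0 with a 31-bit payload,
    // as asserted elsewhere in this file).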
    __ add(value->reg(), Operand(value->reg()));
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}


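// Convert an untagged int32 result into a proper JS number: a Smi when
// the value fits in 31 bits, otherwise a freshly allocated HeapNumber.
// If the heap allocation fails, control jumps to unsafe_bailout_.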
void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number disagree.
    __ xor_(val, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU.
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    if (!CpuFeatures::IsSupported(SSE2)) {
      // Pop the value from the floating point stack.
      __ fstp(0);
    }
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}


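// Load the value of the given expression onto the top of the virtual
// frame; on exit the frame is exactly one element higher. Boolean
// control flow produced by LoadCondition is materialized here as
// true/false heap values.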
void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int computation,
  // compile that SafeInt32 path, and a bailout path.
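  // (Such an expression is effectively compiled twice: once as untagged
  // int32 code, and once as ordinary tagged code that the int32 path
  // bails out to when it encounters unexpected inputs.)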
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      CpuFeatures::IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;
    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(Factory::false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(Factory::true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(Factory::true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(Factory::false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (e.g., the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(Factory::true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(Factory::false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObject());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObject());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObject());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


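// Allocate and store the arguments object, either eagerly or (in the
// lazy mode) as the hole sentinel, writing it into both the arguments
// slot and the arguments shadow slot. Returns the stored value.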
Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->slot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
  return frame_->Pop();
}

// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code. Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it. Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it. Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}

856
Steve Blocka7e24c12009-10-30 11:49:00 +0000857// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
858// convert it to a boolean in the condition code register or jump to
859// 'false_target'/'true_target' as appropriate.
860void CodeGenerator::ToBoolean(ControlDestination* dest) {
861 Comment cmnt(masm_, "[ ToBoolean");
862
863 // The value to convert should be popped from the frame.
864 Result value = frame_->Pop();
865 value.ToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +0000866
Steve Block6ded16b2010-05-10 14:33:55 +0100867 if (value.is_integer32()) { // Also takes Smi case.
868 Comment cmnt(masm_, "ONLY_INTEGER_32");
Andrei Popescu402d9372010-02-26 13:31:12 +0000869 if (FLAG_debug_code) {
Steve Block6ded16b2010-05-10 14:33:55 +0100870 Label ok;
871 __ AbortIfNotNumber(value.reg());
872 __ test(value.reg(), Immediate(kSmiTagMask));
873 __ j(zero, &ok);
874 __ fldz();
875 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
876 __ FCmp();
877 __ j(not_zero, &ok);
878 __ Abort("Smi was wrapped in HeapNumber in output from bitop");
879 __ bind(&ok);
880 }
881 // In the integer32 case there are no Smis hidden in heap numbers, so we
882 // need only test for Smi zero.
883 __ test(value.reg(), Operand(value.reg()));
884 dest->false_target()->Branch(zero);
885 value.Unuse();
886 dest->Split(not_zero);
887 } else if (value.is_number()) {
888 Comment cmnt(masm_, "ONLY_NUMBER");
889 // Fast case if TypeInfo indicates only numbers.
890 if (FLAG_debug_code) {
891 __ AbortIfNotNumber(value.reg());
Andrei Popescu402d9372010-02-26 13:31:12 +0000892 }
893 // Smi => false iff zero.
Kristian Monsen50ef84f2010-07-29 15:18:00 +0100894 STATIC_ASSERT(kSmiTag == 0);
Andrei Popescu402d9372010-02-26 13:31:12 +0000895 __ test(value.reg(), Operand(value.reg()));
896 dest->false_target()->Branch(zero);
897 __ test(value.reg(), Immediate(kSmiTagMask));
898 dest->true_target()->Branch(zero);
899 __ fldz();
900 __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
901 __ FCmp();
902 value.Unuse();
903 dest->Split(not_zero);
904 } else {
905 // Fast case checks.
906 // 'false' => false.
907 __ cmp(value.reg(), Factory::false_value());
908 dest->false_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +0000909
Andrei Popescu402d9372010-02-26 13:31:12 +0000910 // 'true' => true.
911 __ cmp(value.reg(), Factory::true_value());
912 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +0000913
Andrei Popescu402d9372010-02-26 13:31:12 +0000914 // 'undefined' => false.
915 __ cmp(value.reg(), Factory::undefined_value());
916 dest->false_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +0000917
Andrei Popescu402d9372010-02-26 13:31:12 +0000918 // Smi => false iff zero.
Kristian Monsen50ef84f2010-07-29 15:18:00 +0100919 STATIC_ASSERT(kSmiTag == 0);
Andrei Popescu402d9372010-02-26 13:31:12 +0000920 __ test(value.reg(), Operand(value.reg()));
921 dest->false_target()->Branch(zero);
922 __ test(value.reg(), Immediate(kSmiTagMask));
923 dest->true_target()->Branch(zero);
Steve Blocka7e24c12009-10-30 11:49:00 +0000924
Andrei Popescu402d9372010-02-26 13:31:12 +0000925 // Call the stub for all other cases.
926 frame_->Push(&value); // Undo the Pop() from above.
927 ToBooleanStub stub;
928 Result temp = frame_->CallStub(&stub, 1);
929 // Convert the result to a condition code.
930 __ test(temp.reg(), Operand(temp.reg()));
931 temp.Unuse();
932 dest->Split(not_equal);
933 }
Steve Blocka7e24c12009-10-30 11:49:00 +0000934}


class FloatingPointHelper : public AllStatic {
 public:

  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             TypeInfo type_info,
                             bool use_sse3,
                             Label* operand_conversion_failure);
  static void LoadNumbersAsIntegers(MacroAssembler* masm,
                                    TypeInfo type_info,
                                    bool use_sse3,
                                    Label* operand_conversion_failure);
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Test if operands are smis or heap numbers and load them
  // into xmm0 and xmm1 if they are. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);
};


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


// Perform or call the specialized stub for a binary operation. Requires the
// three registers left, right and dst to be distinct and spilled. This
// deferred operation has up to three entry points: The main one calls the
// runtime system. The second is for when the result is a non-Smi. The
// third is for when at least one of the inputs is non-Smi and we have SSE2.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
    ASSERT(!left.is(right));
  }

  virtual void Generate();

  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit().
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToAnswerOutOfRange(Condition cond);
  void JumpToConstantRhs(Condition cond, Smi* smi_value);
  Label* NonSmiInputLabel();

 private:
  void GenerateAnswerOutOfRange();
  void GenerateNonSmiInput();

  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
  Label answer_out_of_range_;
  Label non_smi_input_;
  Label constant_rhs_;
  Smi* smi_value_;
};


Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
  if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) {
    return &non_smi_input_;
  } else {
    return entry_label();
  }
}


void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) {
  __ j(cond, &answer_out_of_range_);
}


void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
                                                      Smi* smi_value) {
  smi_value_ = smi_value;
  __ j(cond, &constant_rhs_);
}


void DeferredInlineBinaryOperation::Generate() {
  // Registers are not saved implicitly for this stub, so we should not
  // tread on the registers that were not passed to us.
  if (CpuFeatures::IsSupported(SSE2) &&
      ((op_ == Token::ADD) ||
       (op_ == Token::SUB) ||
       (op_ == Token::MUL) ||
       (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    Exit();


    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  // Register spilling is not done implicitly for this stub.
  // We cannot postpone it any longer now, though.
  SaveRegisters();

  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  RestoreRegisters();
  Exit();

  if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) {
    GenerateNonSmiInput();
  }
  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


void DeferredInlineBinaryOperation::GenerateNonSmiInput() {
  // We know at least one of the inputs was not a Smi.
  // This is a third entry point into the deferred code.
  // We may not overwrite left_ because we want to be able
  // to call the handling code for non-smi answer and it
  // might want to overwrite the heap number in left_.
  ASSERT(!right_.is(dst_));
  ASSERT(!left_.is(dst_));
  ASSERT(!left_.is(right_));
  // This entry point is used for bit ops where the right hand side
  // is a constant Smi and the left hand side is a heap object. It
  // is also used for bit ops where both sides are unknown, but where
  // at least one of them is a heap object.
  bool rhs_is_constant = constant_rhs_.is_linked();
  // We can't generate code for both cases.
  ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked());

  if (FLAG_debug_code) {
    __ int3();  // We don't fall through into this code.
  }

  __ bind(&non_smi_input_);

  if (rhs_is_constant) {
    __ bind(&constant_rhs_);
    // In this case the input is a heap object and it is in the dst_ register.
    // The left_ and right_ registers have not been initialized yet.
    __ mov(right_, Immediate(smi_value_));
    __ mov(left_, Operand(dst_));
    if (!CpuFeatures::IsSupported(SSE2)) {
      __ jmp(entry_label());
      return;
    } else {
      CpuFeatures::Scope use_sse2(SSE2);
      __ JumpIfNotNumber(dst_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    }
  } else {
    // We know we have SSE2 here because otherwise the label is not linked (see
    // NonSmiInputLabel).
    CpuFeatures::Scope use_sse2(SSE2);
    // Handle the non-constant right hand side situation:
    if (left_info_.IsSmi()) {
      // Right is a heap object.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, dst_, right_info_, entry_label());
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_);
    } else if (right_info_.IsSmi()) {
      // Left is a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    } else {
      // Here we don't know if it's one or both that is a heap object.
      Label only_right_is_heap_object, got_both;
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_, &only_right_is_heap_object);
      // Left was a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_, &got_both);
      // Both were heap objects.
      __ rcl(right_, 1);  // Put tag back.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ jmp(&got_both);
      __ bind(&only_right_is_heap_object);
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ bind(&got_both);
    }
  }
  ASSERT(op_ == Token::BIT_AND ||
         op_ == Token::BIT_OR ||
         op_ == Token::BIT_XOR ||
         right_.is(ecx));
  switch (op_) {
    case Token::BIT_AND: __ and_(dst_, Operand(right_)); break;
    case Token::BIT_OR: __ or_(dst_, Operand(right_)); break;
    case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break;
    case Token::SHR: __ shr_cl(dst_); break;
    case Token::SAR: __ sar_cl(dst_); break;
    case Token::SHL: __ shl_cl(dst_); break;
    default: UNREACHABLE();
  }
  if (op_ == Token::SHR) {
    // Check that the *unsigned* result fits in a smi. Neither of
    // the two high-order bits can be set:
    //  * 0x80000000: high bit would be lost when smi tagging.
    //  * 0x40000000: this number would convert to negative when smi
    //    tagging.
    __ test(dst_, Immediate(0xc0000000));
    __ j(not_zero, &answer_out_of_range_);
  } else {
    // Check that the *signed* result fits in a smi.
    __ cmp(dst_, 0xc0000000);
    __ j(negative, &answer_out_of_range_);
  }
  __ SmiTag(dst_);
  Exit();
}


void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() {
  Label after_alloc_failure2;
  Label allocation_ok;
  __ bind(&after_alloc_failure2);
  // We have to allocate a number, causing a GC, while keeping hold of
  // the answer in dst_. The answer is not a Smi. We can't just call the
  // runtime shift function here because we already threw away the inputs.
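  // The untagged 32-bit answer is therefore split into two GC-safe (Smi
  // tagged) halves, so it can be pushed across the allocating runtime
  // call below and reassembled afterwards.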
  __ xor_(left_, Operand(left_));
  __ shl(dst_, 1);  // Put top bit in carry flag and Smi tag the low bits.
  __ rcr(left_, 1);  // Rotate with carry.
  __ push(dst_);  // Smi tagged low 31 bits.
  __ push(left_);  // 0 or 0x80000000, which is Smi tagged in both cases.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  if (!left_.is(eax)) {
    __ mov(left_, eax);
  }
  __ pop(right_);  // High bit.
  __ pop(dst_);  // Low 31 bits.
  __ shr(dst_, 1);  // Put 0 in top bit.
  __ or_(dst_, Operand(right_));
  __ jmp(&allocation_ok);

  // This is the second entry point to the deferred code. It is used only by
  // the bit operations.
  // The dst_ register has the answer. It is not Smi tagged. If mode_ is
  // OVERWRITE_LEFT then left_ must contain either an overwritable heap number
  // or a Smi.
  // Put a heap number pointer in left_.
  __ bind(&answer_out_of_range_);
  SaveRegisters();
  if (mode_ == OVERWRITE_LEFT) {
    __ test(left_, Immediate(kSmiTagMask));
    __ j(not_zero, &allocation_ok);
  }
  // This trashes right_.
  __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
  __ bind(&allocation_ok);
  if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) {
    CpuFeatures::Scope use_sse2(SSE2);
    ASSERT(Token::IsBitOp(op_));
    // Signed conversion.
    __ cvtsi2sd(xmm0, Operand(dst_));
    __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0);
  } else {
    if (op_ == Token::SHR) {
      __ push(Immediate(0));  // High word of unsigned value.
      __ push(dst_);
      __ fild_d(Operand(esp, 0));
      __ Drop(2);
    } else {
      ASSERT(Token::IsBitOp(op_));
      __ push(dst_);
      __ fild_s(Operand(esp, 0));  // Signed conversion.
      __ pop(dst_);
    }
    __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset));
  }
  __ mov(dst_, left_);
  RestoreRegisters();
  Exit();
}
1373
1374
Steve Block6ded16b2010-05-10 14:33:55 +01001375static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
1376 Token::Value op,
1377 const Result& right,
1378 const Result& left) {
1379 // Set TypeInfo of result according to the operation performed.
1380 // Rely on the fact that smis have a 31 bit payload on ia32.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01001381 STATIC_ASSERT(kSmiValueSize == 31);
Steve Block6ded16b2010-05-10 14:33:55 +01001382 switch (op) {
1383 case Token::COMMA:
1384 return right.type_info();
1385 case Token::OR:
1386 case Token::AND:
1387 // Result type can be either of the two input types.
1388 return operands_type;
1389 case Token::BIT_AND: {
1390 // Anding with positive Smis will give you a Smi.
1391 if (right.is_constant() && right.handle()->IsSmi() &&
1392 Smi::cast(*right.handle())->value() >= 0) {
1393 return TypeInfo::Smi();
1394 } else if (left.is_constant() && left.handle()->IsSmi() &&
1395 Smi::cast(*left.handle())->value() >= 0) {
1396 return TypeInfo::Smi();
1397 }
1398 return (operands_type.IsSmi())
1399 ? TypeInfo::Smi()
1400 : TypeInfo::Integer32();
1401 }
1402 case Token::BIT_OR: {
1403 // Oring with negative Smis will give you a Smi.
1404 if (right.is_constant() && right.handle()->IsSmi() &&
1405 Smi::cast(*right.handle())->value() < 0) {
1406 return TypeInfo::Smi();
1407 } else if (left.is_constant() && left.handle()->IsSmi() &&
1408 Smi::cast(*left.handle())->value() < 0) {
1409 return TypeInfo::Smi();
1410 }
1411 return (operands_type.IsSmi())
1412 ? TypeInfo::Smi()
1413 : TypeInfo::Integer32();
1414 }
1415 case Token::BIT_XOR:
1416 // Result is always a 32 bit integer. Smi property of inputs is preserved.
1417 return (operands_type.IsSmi())
1418 ? TypeInfo::Smi()
1419 : TypeInfo::Integer32();
1420 case Token::SAR:
1421 if (left.is_smi()) return TypeInfo::Smi();
1422 // Result is a smi if we shift by a constant >= 1, otherwise an integer32.
1423 // Shift amount is masked with 0x1F (ECMA standard 11.7.2).
1424 return (right.is_constant() && right.handle()->IsSmi()
1425 && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
1426 ? TypeInfo::Smi()
1427 : TypeInfo::Integer32();
1428 case Token::SHR:
1429 // Result is a smi if we shift by a constant >= 2, an integer32 if
1430 // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
1431 if (right.is_constant() && right.handle()->IsSmi()) {
1432 int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
1433 if (shift_amount > 1) {
1434 return TypeInfo::Smi();
1435 } else if (shift_amount > 0) {
1436 return TypeInfo::Integer32();
1437 }
1438 }
1439 return TypeInfo::Number();
1440 case Token::ADD:
1441 if (operands_type.IsSmi()) {
1442 // The Integer32 range is big enough to take the sum of any two Smis.
1443 return TypeInfo::Integer32();
1444 } else if (operands_type.IsNumber()) {
1445 return TypeInfo::Number();
1446 } else if (left.type_info().IsString() || right.type_info().IsString()) {
1447 return TypeInfo::String();
1448 } else {
1449 return TypeInfo::Unknown();
1450 }
1451 case Token::SHL:
1452 return TypeInfo::Integer32();
1453 case Token::SUB:
1454 // The Integer32 range is big enough to take the difference of any two
1455 // Smis.
1456 return (operands_type.IsSmi()) ?
1457 TypeInfo::Integer32() :
1458 TypeInfo::Number();
1459 case Token::MUL:
1460 case Token::DIV:
1461 case Token::MOD:
1462 // Result is always a number.
1463 return TypeInfo::Number();
1464 default:
1465 UNREACHABLE();
1466 }
1467 UNREACHABLE();
1468 return TypeInfo::Unknown();
1469}
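// (Added example, not in the original.) Two of the rules above, seen at the
// JavaScript level: for BIT_AND, x & 0xff is always in [0, 255], which fits
// in a smi regardless of x, so a non-negative smi constant forces a smi
// result. For BIT_OR the dual holds: x | -4 keeps all high bits set, so the
// result is always in [-4, -1], which is likewise always a smi.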


void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          answer =
            frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
        }
      } else if (right_is_string) {
        answer =
          frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = stub.GenerateCall(masm_, frame_, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Choose the code to generate based on the operation, the operand types
    // and the loop nesting level. Bit operations and integer32 operands are
    // assumed to be likely smis, but even then the inline smi check code is
    // only generated when the operation is part of a loop. For all other
    // operations the inline smi check is generated only for likely-smi
    // operands inside a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = stub.GenerateCall(masm_, frame_, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
        if (answer != 0 || (left >= 0 && right >= 0)) {
          answer_object = Smi::FromInt(static_cast<int>(answer));
        }
      }
    }
    break;
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
      int shift_amount = right & 0x1F;
      if (Smi::IsValid(left << shift_amount)) {
        answer_object = Smi::FromInt(left << shift_amount);
      }
      break;
    }
    case Token::SHR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      unsigned_left >>= shift_amount;
      if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
        answer_object = Smi::FromInt(unsigned_left);
      }
      break;
    }
    case Token::SAR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      if (left < 0) {
        // Perform an arithmetic shift of a negative number by
        // complementing the number, shifting logically, and
        // complementing again.
        unsigned_left = ~unsigned_left;
        unsigned_left >>= shift_amount;
        unsigned_left = ~unsigned_left;
      } else {
        unsigned_left >>= shift_amount;
      }
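      // (Added note, not in the original.) The complement trick works
      // because for negative x, the arithmetic shift equals ~((~x) >> n)
      // with a logical shift: complementing turns the leading 1-bits into
      // 0-bits, the logical shift fills with zeros, and the final
      // complement restores the 1-bits an arithmetic shift would have
      // produced. E.g. with 8-bit values, x = -8 = 0xf8, n = 2:
      //   ~x = 0x07, 0x07 >> 2 = 0x01, ~0x01 = 0xfe = -2 = -8 >> 2.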
      ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
      answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object == Heap::undefined_value()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}


void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
                                               Result* right,
                                               JumpTarget* both_smi) {
  TypeInfo left_info = left->type_info();
  TypeInfo right_info = right->type_info();
  if (left_info.IsDouble() || left_info.IsString() ||
      right_info.IsDouble() || right_info.IsString()) {
    // We know that left and right are not both smi. Don't do any tests.
    return;
  }

  if (left->reg().is(right->reg())) {
    if (!left_info.IsSmi()) {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), left->reg());
      __ or_(temp.reg(), Operand(right->reg()));
      __ test(temp.reg(), Immediate(kSmiTagMask));
      temp.Unuse();
      both_smi->Branch(zero);
    } else {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    if (!right_info.IsSmi()) {
      __ test(right->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  }
}
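// (Added note, not in the original.) The or/test idiom above checks two
// values with a single branch: smis have tag bit 0, so (left | right) has
// its low bit clear iff both low bits are clear, i.e. iff both values are
// smis. With kSmiTagMask == 1:
//   left = ...0, right = ...0  ->  (left | right) & 1 == 0   (both smis)
//   any heap object involved   ->  (left | right) & 1 == 1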


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  DeferredCode* deferred) {
  JumpIfNotBothSmiUsingTypeInfo(left,
                                right,
                                scratch,
                                left_info,
                                right_info,
                                deferred->entry_label());
}


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  Label* on_not_smi) {
  if (left.is(right)) {
    if (!left_info.IsSmi()) {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left);
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      __ mov(scratch, left);
      __ or_(scratch, Operand(right));
      __ test(scratch, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left);
    if (!right_info.IsSmi()) {
      __ test(right, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  }
}


// Implements a binary operation using a deferred code object and some
// inline code to operate on smis quickly.
Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
                                               Result* left,
                                               Result* right,
                                               OverwriteMode overwrite_mode) {
  // Copy the type info because left and right may be overwritten.
  TypeInfo left_type_info = left->type_info();
  TypeInfo right_type_info = right->type_info();
  Token::Value op = expr->op();
  Result answer;
  // Special handling of div and mod because they use fixed registers.
  if (op == Token::DIV || op == Token::MOD) {
    // We need eax as the quotient register, edx as the remainder
    // register, neither left nor right in eax or edx, and left copied
    // to eax.
    Result quotient;
    Result remainder;
    bool left_is_in_eax = false;
    // Step 1: get eax for quotient.
    if ((left->is_register() && left->reg().is(eax)) ||
        (right->is_register() && right->reg().is(eax))) {
      // One or both is in eax. Use a fresh non-edx register for
      // them.
      Result fresh = allocator_->Allocate();
      ASSERT(fresh.is_valid());
      if (fresh.reg().is(edx)) {
        remainder = fresh;
        fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
      }
      if (left->is_register() && left->reg().is(eax)) {
        quotient = *left;
        *left = fresh;
        left_is_in_eax = true;
      }
      if (right->is_register() && right->reg().is(eax)) {
        quotient = *right;
        *right = fresh;
      }
      __ mov(fresh.reg(), eax);
    } else {
      // Neither left nor right is in eax.
      quotient = allocator_->Allocate(eax);
    }
    ASSERT(quotient.is_register() && quotient.reg().is(eax));
    ASSERT(!(left->is_register() && left->reg().is(eax)));
    ASSERT(!(right->is_register() && right->reg().is(eax)));

    // Step 2: get edx for remainder if necessary.
    if (!remainder.is_valid()) {
      if ((left->is_register() && left->reg().is(edx)) ||
          (right->is_register() && right->reg().is(edx))) {
        Result fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
        if (left->is_register() && left->reg().is(edx)) {
          remainder = *left;
          *left = fresh;
        }
        if (right->is_register() && right->reg().is(edx)) {
          remainder = *right;
          *right = fresh;
        }
        __ mov(fresh.reg(), edx);
      } else {
        // Neither left nor right is in edx.
        remainder = allocator_->Allocate(edx);
      }
    }
    ASSERT(remainder.is_register() && remainder.reg().is(edx));
    ASSERT(!(left->is_register() && left->reg().is(edx)));
    ASSERT(!(right->is_register() && right->reg().is(edx)));

    left->ToRegister();
    right->ToRegister();
    frame_->Spill(eax);
    frame_->Spill(edx);
    // DeferredInlineBinaryOperation requires all the registers that it is
    // told about to be spilled and distinct.
    Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);

    // Check that left and right are smi tagged.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          (op == Token::DIV) ? eax : edx,
                                          left->reg(),
                                          distinct_right.reg(),
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), edx,
                                  left_type_info, right_type_info, deferred);
    if (!left_is_in_eax) {
      __ mov(eax, left->reg());
    }
    // Sign extend eax into edx:eax.
    __ cdq();
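    // (Added note, not in the original.) idiv takes a 64-bit dividend in
    // the register pair edx:eax, which is why this code reserved exactly
    // those two registers above. Schematically:
    //   cdq      ; edx:eax = sign-extended eax
    //   idiv r   ; eax = edx:eax / r, edx = edx:eax % r
    // Both operands are still smi tagged here, but since a smi is the
    // integer scaled by two, the results relate simply to the untagged
    // ones: (2a) / (2b) == a / b and (2a) % (2b) == 2 * (a % b). So the
    // quotient must be retagged below while the remainder keeps its tag.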
    // Check for 0 divisor.
    __ test(right->reg(), Operand(right->reg()));
    deferred->Branch(zero);
    // Divide edx:eax by the right operand.
    __ idiv(right->reg());

    // Complete the operation.
    if (op == Token::DIV) {
      // Check for a negative zero result. If the result is zero and the
      // divisor is negative, return a floating point negative zero. The
      // virtual frame is unchanged in this block, so local control flow
      // can use a Label rather than a JumpTarget. If the context of this
      // expression will treat -0 like 0, do not do this test.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(left->reg(), Operand(left->reg()));
        __ j(not_zero, &non_zero_result);
        __ test(right->reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
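      // (Added example, not in the original.) Negative zero only arises
      // here when the dividend is 0 and the divisor is negative: in
      // JavaScript, 0 / -5 evaluates to -0, and -0 cannot be a smi because
      // the smi encoding has a single zero. That is why a zero dividend
      // with a negative divisor falls back to the deferred (heap number)
      // path.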
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by
      // the idiv instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      deferred->Branch(equal);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      deferred->Branch(not_zero);
      // Tag the result and store it in the quotient register.
      __ SmiTag(eax);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = quotient;
    } else {
      ASSERT(op == Token::MOD);
      // Check for a negative zero result. If the result is zero and
      // the dividend is negative, return a floating point negative
      // zero. The frame is unchanged in this block, so local control
      // flow can use a Label rather than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(edx, Operand(edx));
        __ j(not_zero, &non_zero_result, taken);
        __ test(left->reg(), Operand(left->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = remainder;
    }
    ASSERT(answer.is_valid());
    return answer;
  }

  // Special handling of shift operations because they use fixed
  // registers.
  if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
    // Move left out of ecx if necessary.
    if (left->is_register() && left->reg().is(ecx)) {
      *left = allocator_->Allocate();
      ASSERT(left->is_valid());
      __ mov(left->reg(), ecx);
    }
    right->ToRegister(ecx);
    left->ToRegister();
    ASSERT(left->is_register() && !left->reg().is(ecx));
    ASSERT(right->is_register() && right->reg().is(ecx));
    if (left_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    }
    if (right_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
    }

    // We will modify right, it must be spilled.
    frame_->Spill(ecx);
    // DeferredInlineBinaryOperation requires all the registers that it is told
    // about to be spilled and distinct. We know that right is ecx and left is
    // not ecx.
    frame_->Spill(left->reg());

    // Use a fresh answer register to avoid spilling the left operand.
    answer = allocator_->Allocate();
    ASSERT(answer.is_valid());

    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          answer.reg(),
                                          left->reg(),
                                          ecx,
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());

    // Untag both operands.
    __ mov(answer.reg(), left->reg());
    __ SmiUntag(answer.reg());
    __ SmiUntag(right->reg());  // Right is ecx.

    // Perform the operation.
    ASSERT(right->reg().is(ecx));
    switch (op) {
      case Token::SAR: {
        __ sar_cl(answer.reg());
        if (!left_type_info.IsSmi()) {
          // Check that the *signed* result fits in a smi.
          __ cmp(answer.reg(), 0xc0000000);
          deferred->JumpToAnswerOutOfRange(negative);
        }
        break;
      }
      case Token::SHR: {
        __ shr_cl(answer.reg());
        // Check that the *unsigned* result fits in a smi. Neither of
        // the two high-order bits can be set:
        //  * 0x80000000: high bit would be lost when smi tagging.
        //  * 0x40000000: this number would convert to negative when smi
        //    tagging.
        // These two cases can only happen with shifts by 0 or 1 when
        // handed a valid smi. If the answer cannot be represented by a
        // smi, restore the left and right arguments, and jump to slow
        // case. The low bit of the left argument may be lost, but only
        // in a case where it is dropped anyway.
        __ test(answer.reg(), Immediate(0xc0000000));
        deferred->JumpToAnswerOutOfRange(not_zero);
        break;
      }
      case Token::SHL: {
        __ shl_cl(answer.reg());
        // Check that the *signed* result fits in a smi.
        __ cmp(answer.reg(), 0xc0000000);
        deferred->JumpToAnswerOutOfRange(negative);
        break;
      }
      default:
        UNREACHABLE();
    }
    // Smi-tag the result in answer.
    __ SmiTag(answer.reg());
    deferred->BindExit();
    left->Unuse();
    right->Unuse();
    ASSERT(answer.is_valid());
    return answer;
  }

  // Handle the other binary operations.
  left->ToRegister();
  right->ToRegister();
  // DeferredInlineBinaryOperation requires all the registers that it is told
  // about to be spilled.
  Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);
  // A newly allocated register answer is used to hold the answer. The
  // registers containing left and right are not modified so they don't
  // need to be spilled in the fast case.
  answer = allocator_->Allocate();
  ASSERT(answer.is_valid());

  // Perform the smi tag check.
  DeferredInlineBinaryOperation* deferred =
      new DeferredInlineBinaryOperation(op,
                                        answer.reg(),
                                        left->reg(),
                                        distinct_right.reg(),
                                        left_type_info,
                                        right_type_info,
                                        overwrite_mode);
  Label non_smi_bit_op;
  if (op != Token::BIT_OR) {
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());
  }

  __ mov(answer.reg(), left->reg());
  switch (op) {
    case Token::ADD:
      __ add(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::SUB:
      __ sub(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::MUL: {
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // Remove smi tag from the left operand (but keep sign).
      // Left-hand operand has been copied into answer.
      __ SmiUntag(answer.reg());
      // Do multiplication of smis, leaving result in answer.
      __ imul(answer.reg(), Operand(right->reg()));
      // Go slow on overflows.
      deferred->Branch(overflow);
      // Check for negative zero result. If product is zero, and one
      // argument is negative, go to slow case. The frame is unchanged
      // in this block, so local control flow can use a Label rather
      // than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(answer.reg(), Operand(answer.reg()));
        __ j(not_zero, &non_zero_result, taken);
        __ mov(answer.reg(), left->reg());
        __ or_(answer.reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ xor_(answer.reg(), Operand(answer.reg()));  // Positive 0 is correct.
        __ bind(&non_zero_result);
      }
      break;
    }
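    // (Added note, not in the original.) Untagging only one factor is
    // enough because a smi is the integer scaled by two: with answer
    // holding the untagged a and right still holding the tagged 2b,
    //   a * (2b) == 2 * (a * b),
    // which is exactly the tagged representation of the product, so no
    // retagging step is needed after the imul.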

    case Token::BIT_OR:
      __ or_(answer.reg(), Operand(right->reg()));
      __ test(answer.reg(), Immediate(kSmiTagMask));
      __ j(not_zero, deferred->NonSmiInputLabel());
      break;

    case Token::BIT_AND:
      __ and_(answer.reg(), Operand(right->reg()));
      break;

    case Token::BIT_XOR:
      __ xor_(answer.reg(), Operand(right->reg()));
      break;

    default:
      UNREACHABLE();
      break;
  }

  deferred->BindExit();
  left->Unuse();
  right->Unuse();
  ASSERT(answer.is_valid());
  return answer;
}


// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             TypeInfo type_info,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperation::Generate() {
  // For mod we don't generate all the Smi code inline.
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, src_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// Call the appropriate binary operation stub to compute value op src
// and leave the result in dst.
class DeferredInlineSmiOperationReversed: public DeferredCode {
 public:
  DeferredInlineSmiOperationReversed(Token::Value op,
                                     Register dst,
                                     Smi* value,
                                     Register src,
                                     TypeInfo type_info,
                                     OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        type_info_(type_info),
        value_(value),
        src_(src),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperationReversed");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  Register src_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperationReversed::Generate() {
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, value_, src_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src + value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAdd::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}
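// (Added note, not in the original.) This is the "optimistic add" pattern:
// the inline fast path executes add(dst, value) first and only afterwards
// checks the overflow flag and the smi tag. On the slow path the add has
// already mutated dst, so the first thing the deferred code does is
// subtract the constant again to recover the original operand before
// calling the stub.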


// The result of value + src is in dst. It either overflowed or was not
// smi tagged. Undo the speculative addition and call the appropriate
// specialized stub for add. The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               TypeInfo type_info,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAddReversed::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, value_, dst_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src - value is in dst. It either overflowed or was not
// smi tagged. Undo the speculative subtraction and call the
// appropriate specialized stub for subtract. The result is left in
// dst.
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiSub::Generate() {
  // Undo the optimistic sub operation and call the shared stub.
  __ add(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::SUB,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
                                                 Result* operand,
                                                 Handle<Object> value,
                                                 bool reversed,
                                                 OverwriteMode overwrite_mode) {
  // Generate inline code for a binary operation when one of the
  // operands is a constant smi. Consumes the argument "operand".
  if (IsUnsafeSmi(value)) {
    Result unsafe_operand(value);
    if (reversed) {
      return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
                                      overwrite_mode);
    } else {
      return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
                                      overwrite_mode);
    }
  }

  // Get the literal value.
  Smi* smi_value = Smi::cast(*value);
  int int_value = smi_value->value();

  Token::Value op = expr->op();
  Result answer;
  switch (op) {
    case Token::ADD: {
      operand->ToRegister();
      frame_->Spill(operand->reg());

      // Optimistically add. Call the specialized add stub if the
      // result is not a smi or overflows.
      DeferredCode* deferred = NULL;
      if (reversed) {
        deferred = new DeferredInlineSmiAddReversed(operand->reg(),
                                                    operand->type_info(),
                                                    smi_value,
                                                    overwrite_mode);
      } else {
        deferred = new DeferredInlineSmiAdd(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
      }
      __ add(Operand(operand->reg()), Immediate(value));
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred = NULL;
      if (reversed) {
        // The reversed case is only hit when the right operand is not a
        // constant.
        ASSERT(operand->is_register());
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        __ Set(answer.reg(), Immediate(value));
        deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   operand->reg(),
                                                   operand->type_info(),
                                                   overwrite_mode);
        __ sub(answer.reg(), Operand(operand->reg()));
      } else {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        answer = *operand;
        deferred = new DeferredInlineSmiSub(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
        __ sub(Operand(operand->reg()), Immediate(value));
      }
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(answer.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      operand->Unuse();
      break;
    }

    case Token::SAR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        frame_->Spill(operand->reg());
        if (!operand->type_info().IsSmi()) {
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
          deferred->BindExit();
        } else {
          if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
        }
        answer = *operand;
      }
      break;
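      // (Added note, not in the original.) SAR by a nonzero amount can work
      // directly on the tagged value: for a smi holding 2*a, sar by n
      // differs from the tagged result 2*(a >> n) only in the tag bit, and
      // the and_ with ~kSmiTagMask clears exactly that bit. E.g. a = -5
      // (tagged 0xfffffff6), n = 1: sar gives 0xfffffffb, the and gives
      // 0xfffffffa, which is the tagged -3 = -5 >> 1.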

    case Token::SHR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           answer.reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        if (!operand->type_info().IsSmi()) {
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(operand->reg());
        }
        __ mov(answer.reg(), operand->reg());
        __ SmiUntag(answer.reg());
        __ shr(answer.reg(), shift_value);
        // A negative smi shifted right by two or more gives a result in the
        // positive smi range, so only shifts by 0 or 1 need the range check.
        if (shift_value < 2) {
          __ test(answer.reg(), Immediate(0xc0000000));
          deferred->Branch(not_zero);
        }
        operand->Unuse();
        __ SmiTag(answer.reg());
        deferred->BindExit();
      }
      break;

    case Token::SHL:
      if (reversed) {
        // Move operand into ecx and also into a second register.
        // If operand is already in a register, take advantage of that.
        // This lets us modify ecx, but still bail out to deferred code.
        Result right;
        Result right_copy_in_ecx;
        TypeInfo right_type_info = operand->type_info();
        operand->ToRegister();
        if (operand->reg().is(ecx)) {
          right = allocator()->Allocate();
          __ mov(right.reg(), ecx);
          frame_->Spill(ecx);
          right_copy_in_ecx = *operand;
        } else {
          right_copy_in_ecx = allocator()->Allocate(ecx);
          __ mov(ecx, operand->reg());
          right = *operand;
        }
        operand->Unuse();

        answer = allocator()->Allocate();
        DeferredInlineSmiOperationReversed* deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   right.reg(),
                                                   right_type_info,
                                                   overwrite_mode);
        __ mov(answer.reg(), Immediate(int_value));
        __ sar(ecx, kSmiTagSize);
        if (!right_type_info.IsSmi()) {
          deferred->Branch(carry);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(right.reg());
        }
        __ shl_cl(answer.reg());
        __ cmp(answer.reg(), 0xc0000000);
        deferred->Branch(sign);
        __ SmiTag(answer.reg());

        deferred->BindExit();
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        if (shift_value == 0) {
          // Spill operand so it can be overwritten in the slow case.
          frame_->Spill(operand->reg());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          deferred->BindExit();
          answer = *operand;
        } else {
          // Use a fresh temporary for nonzero shift values.
          answer = allocator()->Allocate();
          ASSERT(answer.is_valid());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             answer.reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          if (!operand->type_info().IsSmi()) {
            __ test(operand->reg(), Immediate(kSmiTagMask));
            deferred->Branch(not_zero);
          } else if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          __ mov(answer.reg(), operand->reg());
          STATIC_ASSERT(kSmiTag == 0);  // Adjust code if not the case.
          // We do no shifts, only the Smi conversion, if shift_value is 1.
          if (shift_value > 1) {
            __ shl(answer.reg(), shift_value - 1);
          }
          // Convert int result to Smi, checking that it is in int range.
          STATIC_ASSERT(kSmiTagSize == 1);  // Adjust code if not the case.
          __ add(answer.reg(), Operand(answer.reg()));
          deferred->Branch(overflow);
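          // (Added note, not in the original.) answer still holds the
          // tagged value 2*x here, so after shl by (shift_value - 1) it
          // holds 2*x << (shift_value - 1) == x << shift_value, and the
          // final add doubles that into the tagged result. Using add for
          // the doubling (rather than another shl) makes the overflow
          // flag available as the smi range check.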
          deferred->BindExit();
          operand->Unuse();
        }
      }
      break;

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      operand->ToRegister();
      // DeferredInlineBinaryOperation requires all the registers that it is
      // told about to be spilled.
      frame_->Spill(operand->reg());
      DeferredInlineBinaryOperation* deferred = NULL;
      if (!operand->type_info().IsSmi()) {
        Result left = allocator()->Allocate();
        ASSERT(left.is_valid());
        Result right = allocator()->Allocate();
        ASSERT(right.is_valid());
        deferred = new DeferredInlineBinaryOperation(
            op,
            operand->reg(),
            left.reg(),
            right.reg(),
            operand->type_info(),
            TypeInfo::Smi(),
            overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT);
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->JumpToConstantRhs(not_zero, smi_value);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      if (op == Token::BIT_AND) {
        __ and_(Operand(operand->reg()), Immediate(value));
      } else if (op == Token::BIT_XOR) {
        if (int_value != 0) {
          __ xor_(Operand(operand->reg()), Immediate(value));
        }
      } else {
        ASSERT(op == Token::BIT_OR);
        if (int_value != 0) {
          __ or_(Operand(operand->reg()), Immediate(value));
        }
      }
      if (deferred != NULL) deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::DIV:
      if (!reversed && int_value == 2) {
        operand->ToRegister();
        frame_->Spill(operand->reg());

        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check that the lowest log2(value) bits of the operand are zero,
        // and test the smi tag at the same time.
        STATIC_ASSERT(kSmiTag == 0);
        STATIC_ASSERT(kSmiTagSize == 1);
        __ test(operand->reg(), Immediate(3));
        deferred->Branch(not_zero);  // Branch if non-smi or odd smi.
        __ sar(operand->reg(), 1);
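        // (Added note, not in the original.) On the tagged value 2*x the
        // mask 3 tests the smi tag bit and the lowest payload bit together:
        // it is zero only for an even smi. For those, sar by 1 maps the
        // tagged 2*x directly to the tagged x/2. E.g. x = 6 (tagged 12,
        // 0b1100): 12 & 3 == 0, and 12 >> 1 == 6, the tagged 3.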
        deferred->BindExit();
        answer = *operand;
      } else {
        // Cannot fall through MOD to default case, so we duplicate the
        // default case here.
        Result constant_operand(value);
        if (reversed) {
          answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                            overwrite_mode);
        } else {
          answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                            overwrite_mode);
        }
      }
      break;

    // Generate inline code for mod of powers of 2 and negative powers of 2.
    case Token::MOD:
      if (!reversed &&
          int_value != 0 &&
          (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check for negative or non-Smi left hand side.
        __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
        deferred->Branch(not_zero);
        if (int_value < 0) int_value = -int_value;
        if (int_value == 1) {
          __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
        } else {
          __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
        }
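        // (Added note, not in the original.) For a non-negative x and a
        // power of two m, x % m == x & (m - 1). The identity survives
        // tagging: the tagged value is 2*x, the mask is 2*m - 1, and since
        // the tag bit of 2*x is zero,
        //   (2*x) & (2*m - 1) == 2 * (x & (m - 1)),
        // which is exactly the tagged x % m. The sign check above matters
        // because the identity fails for negative x (JavaScript % keeps
        // the sign of the dividend).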
2676 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002677 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002678 break;
2679 }
2680 // Fall through if we did not find a power of 2 on the right hand side!
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002681 // The next case must be the default.
Steve Blocka7e24c12009-10-30 11:49:00 +00002682
2683 default: {
2684 Result constant_operand(value);
2685 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002686 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002687 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002688 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002689 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002690 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002691 }
2692 break;
2693 }
2694 }
Leon Clarked91b9f72010-01-27 17:25:45 +00002695 ASSERT(answer.is_valid());
2696 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00002697}
2698
2699
Leon Clarkee46be812010-01-19 14:06:41 +00002700static bool CouldBeNaN(const Result& result) {
Steve Block6ded16b2010-05-10 14:33:55 +01002701 if (result.type_info().IsSmi()) return false;
2702 if (result.type_info().IsInteger32()) return false;
Leon Clarkee46be812010-01-19 14:06:41 +00002703 if (!result.is_constant()) return true;
2704 if (!result.handle()->IsHeapNumber()) return false;
2705 return isnan(HeapNumber::cast(*result.handle())->value());
2706}
2707
2708
Steve Block6ded16b2010-05-10 14:33:55 +01002709// Convert from signed to unsigned comparison to match the way EFLAGS are set
2710// by FPU and XMM compare instructions.
2711static Condition DoubleCondition(Condition cc) {
2712 switch (cc) {
2713 case less: return below;
2714 case equal: return equal;
2715 case less_equal: return below_equal;
2716 case greater: return above;
2717 case greater_equal: return above_equal;
2718 default: UNREACHABLE();
2719 }
2720 UNREACHABLE();
2721 return equal;
2722}
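// Illustrative note: ucomisd and the FPU compare-and-set-EFLAGS
// instructions report their result in CF, ZF and PF only (OF and SF are
// cleared), so "left < right" appears as CF set. Reading the result with
// the signed condition `less` (SF != OF) would be wrong; the unsigned
// `below` (CF set) is the condition that matches. For example, with a
// hypothetical label:
//   __ ucomisd(xmm0, xmm1);
//   __ j(DoubleCondition(less), &lhs_smaller);  // jumps on `below`.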
2723
2724
Leon Clarkee46be812010-01-19 14:06:41 +00002725void CodeGenerator::Comparison(AstNode* node,
2726 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00002727 bool strict,
2728 ControlDestination* dest) {
2729 // Strict only makes sense for equality comparisons.
2730 ASSERT(!strict || cc == equal);
2731
2732 Result left_side;
2733 Result right_side;
2734 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
2735 if (cc == greater || cc == less_equal) {
2736 cc = ReverseCondition(cc);
2737 left_side = frame_->Pop();
2738 right_side = frame_->Pop();
2739 } else {
2740 right_side = frame_->Pop();
2741 left_side = frame_->Pop();
2742 }
2743 ASSERT(cc == less || cc == equal || cc == greater_equal);
2744
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002745 // If either side is a constant smi, optimize the comparison.
Leon Clarkee46be812010-01-19 14:06:41 +00002746 bool left_side_constant_smi = false;
2747 bool left_side_constant_null = false;
2748 bool left_side_constant_1_char_string = false;
2749 if (left_side.is_constant()) {
2750 left_side_constant_smi = left_side.handle()->IsSmi();
2751 left_side_constant_null = left_side.handle()->IsNull();
2752 left_side_constant_1_char_string =
2753 (left_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002754 String::cast(*left_side.handle())->length() == 1 &&
2755 String::cast(*left_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002756 }
2757 bool right_side_constant_smi = false;
2758 bool right_side_constant_null = false;
2759 bool right_side_constant_1_char_string = false;
2760 if (right_side.is_constant()) {
2761 right_side_constant_smi = right_side.handle()->IsSmi();
2762 right_side_constant_null = right_side.handle()->IsNull();
2763 right_side_constant_1_char_string =
2764 (right_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002765 String::cast(*right_side.handle())->length() == 1 &&
2766 String::cast(*right_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002767 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002768
2769 if (left_side_constant_smi || right_side_constant_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002770 bool is_loop_condition = (node->AsExpression() != NULL) &&
2771 node->AsExpression()->is_loop_condition();
2772 ConstantSmiComparison(cc, strict, dest, &left_side, &right_side,
2773 left_side_constant_smi, right_side_constant_smi,
2774 is_loop_condition);
Steve Blocka7e24c12009-10-30 11:49:00 +00002775 } else if (cc == equal &&
2776 (left_side_constant_null || right_side_constant_null)) {
2777 // To make null checks efficient, we check if either the left side or
2778 // the right side is the constant 'null'.
2779 // If so, we optimize the code by inlining a null check instead of
2780 // calling the (very) general runtime routine for checking equality.
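    // For example, `x == null` is true when x is null, undefined, or an
    // undetectable object, while `x === null` matches only null itself.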
2781 Result operand = left_side_constant_null ? right_side : left_side;
2782 right_side.Unuse();
2783 left_side.Unuse();
2784 operand.ToRegister();
2785 __ cmp(operand.reg(), Factory::null_value());
2786 if (strict) {
2787 operand.Unuse();
2788 dest->Split(equal);
2789 } else {
2790 // The 'null' value is only equal to 'undefined' if using non-strict
2791 // comparisons.
2792 dest->true_target()->Branch(equal);
2793 __ cmp(operand.reg(), Factory::undefined_value());
2794 dest->true_target()->Branch(equal);
2795 __ test(operand.reg(), Immediate(kSmiTagMask));
2796 dest->false_target()->Branch(equal);
2797
2798 // It can be an undetectable object.
2799 // Use a scratch register in preference to spilling operand.reg().
2800 Result temp = allocator()->Allocate();
2801 ASSERT(temp.is_valid());
2802 __ mov(temp.reg(),
2803 FieldOperand(operand.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002804 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
2805 1 << Map::kIsUndetectable);
Steve Blocka7e24c12009-10-30 11:49:00 +00002806 temp.Unuse();
2807 operand.Unuse();
2808 dest->Split(not_zero);
2809 }
Leon Clarkee46be812010-01-19 14:06:41 +00002810 } else if (left_side_constant_1_char_string ||
2811 right_side_constant_1_char_string) {
2812 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
2813 // Trivial case, comparing two constants.
2814 int left_value = String::cast(*left_side.handle())->Get(0);
2815 int right_value = String::cast(*right_side.handle())->Get(0);
2816 switch (cc) {
2817 case less:
2818 dest->Goto(left_value < right_value);
2819 break;
2820 case equal:
2821 dest->Goto(left_value == right_value);
2822 break;
2823 case greater_equal:
2824 dest->Goto(left_value >= right_value);
2825 break;
2826 default:
2827 UNREACHABLE();
2828 }
2829 } else {
2830 // Only one side is a constant 1 character string.
2831 // If left side is a constant 1-character string, reverse the operands.
2832 // Since one side is a constant string, conversion order does not matter.
2833 if (left_side_constant_1_char_string) {
2834 Result temp = left_side;
2835 left_side = right_side;
2836 right_side = temp;
2837 cc = ReverseCondition(cc);
2838 // This may reintroduce greater or less_equal as the value of cc.
2839 // CompareStub and the inline code both support all values of cc.
2840 }
2841 // Implement comparison against a constant string, inlining the case
2842 // where both sides are strings.
2843 left_side.ToRegister();
2844
2845 // Here we split control flow to the stub call and inlined cases
2846 // before finally splitting it to the control destination. We use
2847 // a jump target and branching to duplicate the virtual frame at
2848 // the first split. We manually handle the off-frame references
2849 // by reconstituting them on the non-fall-through path.
2850 JumpTarget is_not_string, is_string;
2851 Register left_reg = left_side.reg();
2852 Handle<Object> right_val = right_side.handle();
Steve Block6ded16b2010-05-10 14:33:55 +01002853 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
Leon Clarkee46be812010-01-19 14:06:41 +00002854 __ test(left_side.reg(), Immediate(kSmiTagMask));
2855 is_not_string.Branch(zero, &left_side);
2856 Result temp = allocator_->Allocate();
2857 ASSERT(temp.is_valid());
2858 __ mov(temp.reg(),
2859 FieldOperand(left_side.reg(), HeapObject::kMapOffset));
2860 __ movzx_b(temp.reg(),
2861 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
2862 // If we are testing for equality then make use of the symbol shortcut.
2863 // Check if the left hand side has the same type as the right hand
2864 // side (which is always a symbol).
2865 if (cc == equal) {
2866 Label not_a_symbol;
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002867 STATIC_ASSERT(kSymbolTag != 0);
Leon Clarkee46be812010-01-19 14:06:41 +00002868 // Ensure that no non-strings have the symbol bit set.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002869 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
Leon Clarkee46be812010-01-19 14:06:41 +00002870 __ test(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
2871 __ j(zero, &not_a_symbol);
2872 // They are symbols, so do identity compare.
2873 __ cmp(left_side.reg(), right_side.handle());
2874 dest->true_target()->Branch(equal);
2875 dest->false_target()->Branch(not_equal);
2876 __ bind(&not_a_symbol);
2877 }
Steve Block6ded16b2010-05-10 14:33:55 +01002878 // Call the compare stub if the left side is not a flat ASCII string.
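      // The mask below keeps only the string, representation and encoding
      // bits of the instance type, so the following compare matches exactly
      // the sequential (flat) ASCII string types.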
Leon Clarkee46be812010-01-19 14:06:41 +00002879 __ and_(temp.reg(),
2880 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2881 __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
2882 temp.Unuse();
2883 is_string.Branch(equal, &left_side);
2884
2885 // Set up and call the compare stub.
2886 is_not_string.Bind(&left_side);
2887 CompareStub stub(cc, strict, kCantBothBeNaN);
2888 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2889 result.ToRegister();
2890 __ cmp(result.reg(), 0);
2891 result.Unuse();
2892 dest->true_target()->Branch(cc);
2893 dest->false_target()->Jump();
2894
2895 is_string.Bind(&left_side);
Steve Block6ded16b2010-05-10 14:33:55 +01002896 // left_side is a sequential ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002897 left_side = Result(left_reg);
2898 right_side = Result(right_val);
Leon Clarkee46be812010-01-19 14:06:41 +00002899 // Test string equality and comparison.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002900 Label comparison_done;
Leon Clarkee46be812010-01-19 14:06:41 +00002901 if (cc == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00002902 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002903 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002904 __ j(not_equal, &comparison_done);
2905 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002906 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Leon Clarkee46be812010-01-19 14:06:41 +00002907 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2908 char_value);
Leon Clarkee46be812010-01-19 14:06:41 +00002909 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002910 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
2911 Immediate(Smi::FromInt(1)));
2912 // If the length is 0 then the jump is taken and the flags
2913 // correctly represent being less than the one-character string.
2914 __ j(below, &comparison_done);
Steve Block6ded16b2010-05-10 14:33:55 +01002915 // Compare the first character of the string with the
2916 // constant 1-character string.
Leon Clarkee46be812010-01-19 14:06:41 +00002917 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002918 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002919 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2920 char_value);
2921 __ j(not_equal, &comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002922 // If the first character is the same then the long string sorts after
2923 // the short one.
2924 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002925 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002926 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002927 __ bind(&comparison_done);
Leon Clarkee46be812010-01-19 14:06:41 +00002928 left_side.Unuse();
2929 right_side.Unuse();
2930 dest->Split(cc);
2931 }
2932 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002933 // Neither side is a constant Smi, constant 1-char string or constant null.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002934 // If either side is a non-smi constant, or known to be a heap number,
2935 // skip the smi check.
Steve Blocka7e24c12009-10-30 11:49:00 +00002936 bool known_non_smi =
2937 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
Steve Block6ded16b2010-05-10 14:33:55 +01002938 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
2939 left_side.type_info().IsDouble() ||
2940 right_side.type_info().IsDouble();
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002941
Leon Clarkee46be812010-01-19 14:06:41 +00002942 NaNInformation nan_info =
2943 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
2944 kBothCouldBeNaN :
2945 kCantBothBeNaN;
Steve Block6ded16b2010-05-10 14:33:55 +01002946
2947 // Inline number comparison, handling any combination of smis and heap
2948 // numbers, if:
2949 // - the code is in a loop,
2950 // - the compare operation is different from equal, and
2951 // - the compare is not a for-loop comparison.
2952 // The reason for excluding equal is that it will most likely be done
2953 // with smis (not heap numbers), and the code for comparing smis is
2954 // inlined separately. The same reasoning applies to for-loop
2955 // comparisons, which will also most likely be smi comparisons.
2956 bool is_loop_condition = (node->AsExpression() != NULL)
2957 && node->AsExpression()->is_loop_condition();
2958 bool inline_number_compare =
2959 loop_nesting() > 0 && cc != equal && !is_loop_condition;
2960
2961 // Left and right needed in registers for the following code.
Steve Blocka7e24c12009-10-30 11:49:00 +00002962 left_side.ToRegister();
2963 right_side.ToRegister();
2964
2965 if (known_non_smi) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002966 // Inlined equality check:
2967 // If at least one of the objects is not NaN, then if the objects
2968 // are identical, they are equal.
Steve Block6ded16b2010-05-10 14:33:55 +01002969 if (nan_info == kCantBothBeNaN && cc == equal) {
2970 __ cmp(left_side.reg(), Operand(right_side.reg()));
2971 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00002972 }
Steve Block6ded16b2010-05-10 14:33:55 +01002973
Ben Murdoch3bec4d22010-07-22 14:51:16 +01002974 // Inlined number comparison:
Steve Block6ded16b2010-05-10 14:33:55 +01002975 if (inline_number_compare) {
2976 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2977 }
2978
2979 // End of in-line compare, call out to the compare stub. Don't include
2980 // number comparison in the stub if it was inlined.
2981 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
2982 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2983 __ test(answer.reg(), Operand(answer.reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002984 answer.Unuse();
2985 dest->Split(cc);
2986 } else {
2987 // Here we split control flow to the stub call and inlined cases
2988 // before finally splitting it to the control destination. We use
2989 // a jump target and branching to duplicate the virtual frame at
2990 // the first split. We manually handle the off-frame references
2991 // by reconstituting them on the non-fall-through path.
2992 JumpTarget is_smi;
2993 Register left_reg = left_side.reg();
2994 Register right_reg = right_side.reg();
2995
Steve Block6ded16b2010-05-10 14:33:55 +01002996 // In-line check for comparing two smis.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002997 JumpIfBothSmiUsingTypeInfo(&left_side, &right_side, &is_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01002998
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002999 if (has_valid_frame()) {
3000 // Inline the equality check if both operands can't be a NaN. If both
3001 // objects are the same they are equal.
3002 if (nan_info == kCantBothBeNaN && cc == equal) {
3003 __ cmp(left_side.reg(), Operand(right_side.reg()));
3004 dest->true_target()->Branch(equal);
3005 }
3006
3007 // Inlined number comparison:
3008 if (inline_number_compare) {
3009 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
3010 }
3011
3012 // End of in-line compare, call out to the compare stub. Don't include
3013 // number comparison in the stub if it was inlined.
3014 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
3015 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
3016 __ test(answer.reg(), Operand(answer.reg()));
3017 answer.Unuse();
3018 if (is_smi.is_linked()) {
3019 dest->true_target()->Branch(cc);
3020 dest->false_target()->Jump();
3021 } else {
3022 dest->Split(cc);
3023 }
3024 }
3025
3026 if (is_smi.is_linked()) {
3027 is_smi.Bind();
3028 left_side = Result(left_reg);
3029 right_side = Result(right_reg);
Steve Block6ded16b2010-05-10 14:33:55 +01003030 __ cmp(left_side.reg(), Operand(right_side.reg()));
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003031 right_side.Unuse();
3032 left_side.Unuse();
3033 dest->Split(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01003034 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003035 }
3036 }
3037}
3038
3039
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003040void CodeGenerator::ConstantSmiComparison(Condition cc,
3041 bool strict,
3042 ControlDestination* dest,
3043 Result* left_side,
3044 Result* right_side,
3045 bool left_side_constant_smi,
3046 bool right_side_constant_smi,
3047 bool is_loop_condition) {
3048 if (left_side_constant_smi && right_side_constant_smi) {
3049 // Trivial case, comparing two constants.
3050 int left_value = Smi::cast(*left_side->handle())->value();
3051 int right_value = Smi::cast(*right_side->handle())->value();
3052 switch (cc) {
3053 case less:
3054 dest->Goto(left_value < right_value);
3055 break;
3056 case equal:
3057 dest->Goto(left_value == right_value);
3058 break;
3059 case greater_equal:
3060 dest->Goto(left_value >= right_value);
3061 break;
3062 default:
3063 UNREACHABLE();
3064 }
3065 } else {
3066 // Only one side is a constant Smi.
3067 // If left side is a constant Smi, reverse the operands.
3068 // Since one side is a constant Smi, conversion order does not matter.
3069 if (left_side_constant_smi) {
3070 Result* temp = left_side;
3071 left_side = right_side;
3072 right_side = temp;
3073 cc = ReverseCondition(cc);
3074 // This may re-introduce greater or less_equal as the value of cc.
3075 // CompareStub and the inline code both support all values of cc.
3076 }
3077 // Implement comparison against a constant Smi, inlining the case
3078 // where both sides are Smis.
3079 left_side->ToRegister();
3080 Register left_reg = left_side->reg();
3081 Handle<Object> right_val = right_side->handle();
3082
3083 if (left_side->is_smi()) {
3084 if (FLAG_debug_code) {
3085 __ AbortIfNotSmi(left_reg);
3086 }
3087 // Test smi equality and comparison by signed int comparison.
3088 if (IsUnsafeSmi(right_side->handle())) {
3089 right_side->ToRegister();
3090 __ cmp(left_reg, Operand(right_side->reg()));
3091 } else {
3092 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3093 }
3094 left_side->Unuse();
3095 right_side->Unuse();
3096 dest->Split(cc);
3097 } else {
3098 // The only case left is that the left side might not be a smi.
3099 JumpTarget is_smi;
3100 if (cc == equal) {
3101 // We can do the equality comparison before the smi check.
3102 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3103 dest->true_target()->Branch(equal);
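        // If the compare above did not branch, left_reg is not equal to the
        // constant; if it then proves to be a smi, the values cannot be
        // equal, so control can go straight to the false target.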
3104 __ test(left_reg, Immediate(kSmiTagMask));
3105 dest->false_target()->Branch(zero);
3106 } else {
3107 // Do the smi check, then the comparison.
3109 __ test(left_reg, Immediate(kSmiTagMask));
3110 is_smi.Branch(zero, left_side, right_side);
3111 }
3112
3113 // Jump or fall through to here if we are comparing a non-smi to a
3114 // constant smi. If the non-smi is a heap number and this is not
3115 // a loop condition, inline the floating point code.
3116 if (!is_loop_condition && CpuFeatures::IsSupported(SSE2)) {
3117 // Right side is a constant smi and left side has been checked
3118 // not to be a smi.
3119 CpuFeatures::Scope use_sse2(SSE2);
3120 JumpTarget not_number;
3121 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
3122 Immediate(Factory::heap_number_map()));
3123 not_number.Branch(not_equal, left_side);
3124 __ movdbl(xmm1,
3125 FieldOperand(left_reg, HeapNumber::kValueOffset));
3126 int value = Smi::cast(*right_val)->value();
3127 if (value == 0) {
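          // xorpd of a register with itself is the idiomatic way to
          // materialize +0.0 without a memory load.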
3128 __ xorpd(xmm0, xmm0);
3129 } else {
3130 Result temp = allocator()->Allocate();
3131 __ mov(temp.reg(), Immediate(value));
3132 __ cvtsi2sd(xmm0, Operand(temp.reg()));
3133 temp.Unuse();
3134 }
3135 __ ucomisd(xmm1, xmm0);
3136 // Jump to builtin for NaN.
3137 not_number.Branch(parity_even, left_side);
3138 left_side->Unuse();
3139 dest->true_target()->Branch(DoubleCondition(cc));
3140 dest->false_target()->Jump();
3141 not_number.Bind(left_side);
3142 }
3143
3144 // Set up and call the compare stub.
3145 CompareStub stub(cc, strict, kCantBothBeNaN);
3146 Result result = frame_->CallStub(&stub, left_side, right_side);
3147 result.ToRegister();
3148 __ test(result.reg(), Operand(result.reg()));
3149 result.Unuse();
3150 if (cc == equal) {
3151 dest->Split(cc);
3152 } else {
3153 dest->true_target()->Branch(cc);
3154 dest->false_target()->Jump();
3155
3156 // For performance it is important that this case be placed at the end.
3157 is_smi.Bind(left_side, right_side);
3158 if (IsUnsafeSmi(right_side->handle())) {
3159 right_side->ToRegister();
3160 __ cmp(left_reg, Operand(right_side->reg()));
3161 } else {
3162 __ cmp(Operand(left_reg), Immediate(right_side->handle()));
3163 }
3164 left_side->Unuse();
3165 right_side->Unuse();
3166 dest->Split(cc);
3167 }
3168 }
3169 }
3170}
3171
3172
Steve Block6ded16b2010-05-10 14:33:55 +01003173// Check that the comparison operand is a number. Jump to the not_numbers
3174// jump target, passing the left and right results, if it is not a number.
3175static void CheckComparisonOperand(MacroAssembler* masm_,
3176 Result* operand,
3177 Result* left_side,
3178 Result* right_side,
3179 JumpTarget* not_numbers) {
3180 // Perform check if operand is not known to be a number.
3181 if (!operand->type_info().IsNumber()) {
3182 Label done;
3183 __ test(operand->reg(), Immediate(kSmiTagMask));
3184 __ j(zero, &done);
3185 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3186 Immediate(Factory::heap_number_map()));
3187 not_numbers->Branch(not_equal, left_side, right_side, not_taken);
3188 __ bind(&done);
3189 }
3190}
3191
3192
3193// Load a comparison operand onto the FPU stack. This assumes that the operand has
3194// already been checked and is a number.
3195static void LoadComparisonOperand(MacroAssembler* masm_,
3196 Result* operand) {
3197 Label done;
3198 if (operand->type_info().IsDouble()) {
3199 // Operand is known to be a heap number, just load it.
3200 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3201 } else if (operand->type_info().IsSmi()) {
3202 // Operand is known to be a smi. Convert it to double and keep the original
3203 // smi.
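    // fild_s only accepts a memory operand, so the untagged value is pushed,
    // loaded onto the FPU stack from memory, and popped back into the
    // register before re-tagging.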
3204 __ SmiUntag(operand->reg());
3205 __ push(operand->reg());
3206 __ fild_s(Operand(esp, 0));
3207 __ pop(operand->reg());
3208 __ SmiTag(operand->reg());
3209 } else {
3210 // Operand type not known: check for a smi, otherwise assume a heap number.
3211 Label smi;
3212 __ test(operand->reg(), Immediate(kSmiTagMask));
3213 __ j(zero, &smi);
3214 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
3215 __ jmp(&done);
3216 __ bind(&smi);
3217 __ SmiUntag(operand->reg());
3218 __ push(operand->reg());
3219 __ fild_s(Operand(esp, 0));
3220 __ pop(operand->reg());
3221 __ SmiTag(operand->reg());
3222 __ jmp(&done);
3223 }
3224 __ bind(&done);
3225}
3226
3227
3228// Load a comparison operand into an XMM register. Jump to the not_numbers
3229// jump target, passing the left and right results, if it is not a number.
3230static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
3231 Result* operand,
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003232 XMMRegister xmm_reg,
Steve Block6ded16b2010-05-10 14:33:55 +01003233 Result* left_side,
3234 Result* right_side,
3235 JumpTarget* not_numbers) {
3236 Label done;
3237 if (operand->type_info().IsDouble()) {
3238 // Operand is known to be a heap number, just load it.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003239 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003240 } else if (operand->type_info().IsSmi()) {
3241 // Operand is known to be a smi. Convert it to double and keep the original
3242 // smi.
3243 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003244 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003245 __ SmiTag(operand->reg());
3246 } else {
3247 // Operand type not known, check for smi or heap number.
3248 Label smi;
3249 __ test(operand->reg(), Immediate(kSmiTagMask));
3250 __ j(zero, &smi);
3251 if (!operand->type_info().IsNumber()) {
3252 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
3253 Immediate(Factory::heap_number_map()));
3254 not_numbers->Branch(not_equal, left_side, right_side, taken);
3255 }
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003256 __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01003257 __ jmp(&done);
3258
3259 __ bind(&smi);
3260 // Convert the smi to a double and keep the original smi.
3261 __ SmiUntag(operand->reg());
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003262 __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01003263 __ SmiTag(operand->reg());
3264 __ jmp(&done);
3265 }
3266 __ bind(&done);
3267}
3268
3269
3270void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
3271 Result* right_side,
3272 Condition cc,
3273 ControlDestination* dest) {
3274 ASSERT(left_side->is_register());
3275 ASSERT(right_side->is_register());
3276
3277 JumpTarget not_numbers;
3278 if (CpuFeatures::IsSupported(SSE2)) {
3279 CpuFeatures::Scope use_sse2(SSE2);
3280
3281 // Load left and right operand into registers xmm0 and xmm1 and compare.
3282 LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
3283 &not_numbers);
3284 LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
3285 &not_numbers);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003286 __ ucomisd(xmm0, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +01003287 } else {
3289
3290 // Make sure that both comparison operands are numbers.
3291 CheckComparisonOperand(masm_, left_side, left_side, right_side,
3292 &not_numbers);
3293 CheckComparisonOperand(masm_, right_side, left_side, right_side,
3294 &not_numbers);
3295
3296 // Load right and left operand to FPU stack and compare.
3297 LoadComparisonOperand(masm_, right_side);
3298 LoadComparisonOperand(masm_, left_side);
3299 __ FCmp();
3300 }
3301
3302 // Bail out if a NaN is involved.
3303 not_numbers.Branch(parity_even, left_side, right_side, not_taken);
3304
3305 // Split to destination targets based on comparison.
3306 left_side->Unuse();
3307 right_side->Unuse();
3308 dest->true_target()->Branch(DoubleCondition(cc));
3309 dest->false_target()->Jump();
3310
3311 not_numbers.Bind(left_side, right_side);
3312}
3313
3314
Steve Blocka7e24c12009-10-30 11:49:00 +00003315// Call the function just below TOS on the stack with the given
3316// arguments. The receiver is the TOS.
3317void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00003318 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00003319 int position) {
3320 // Push the arguments ("left-to-right") on the stack.
3321 int arg_count = args->length();
3322 for (int i = 0; i < arg_count; i++) {
3323 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01003324 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00003325 }
3326
3327 // Record the position for debugging purposes.
3328 CodeForSourcePosition(position);
3329
3330 // Use the shared code stub to call the function.
3331 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00003332 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00003333 Result answer = frame_->CallStub(&call_function, arg_count + 1);
3334 // Restore context and replace function on the stack with the
3335 // result of the stub invocation.
3336 frame_->RestoreContextRegister();
3337 frame_->SetElementAt(0, &answer);
3338}
3339
3340
Leon Clarked91b9f72010-01-27 17:25:45 +00003341void CodeGenerator::CallApplyLazy(Expression* applicand,
Steve Blocka7e24c12009-10-30 11:49:00 +00003342 Expression* receiver,
3343 VariableProxy* arguments,
3344 int position) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003345 // An optimized implementation of expressions of the form
3346 // x.apply(y, arguments).
3347 // If the arguments object of the scope has not been allocated,
3348 // and x.apply is Function.prototype.apply, this optimization
3349 // just copies y and the arguments of the current function onto the
3350 // stack, as receiver and arguments, and calls x.
3351 // In the implementation comments, we call x the applicand
3352 // and y the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00003353 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
3354 ASSERT(arguments->IsArguments());
3355
Leon Clarked91b9f72010-01-27 17:25:45 +00003356 // Load applicand.apply onto the stack. This will usually
Steve Blocka7e24c12009-10-30 11:49:00 +00003357 // give us a megamorphic load site. Not super, but it works.
Leon Clarked91b9f72010-01-27 17:25:45 +00003358 Load(applicand);
Andrei Popescu402d9372010-02-26 13:31:12 +00003359 frame()->Dup();
Leon Clarked91b9f72010-01-27 17:25:45 +00003360 Handle<String> name = Factory::LookupAsciiSymbol("apply");
3361 frame()->Push(name);
3362 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
3363 __ nop();
3364 frame()->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00003365
3366 // Load the receiver and the existing arguments object onto the
3367 // expression stack. Avoid allocating the arguments object here.
3368 Load(receiver);
Leon Clarkef7060e22010-06-03 12:02:55 +01003369 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00003370
3371 // Emit the source position information after having loaded the
3372 // receiver and the arguments.
3373 CodeForSourcePosition(position);
Leon Clarked91b9f72010-01-27 17:25:45 +00003374 // Contents of frame at this point:
3375 // Frame[0]: arguments object of the current function or the hole.
3376 // Frame[1]: receiver
3377 // Frame[2]: applicand.apply
3378 // Frame[3]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003379
3380 // Check if the arguments object has been lazily allocated
3381 // already. If so, just use that instead of copying the arguments
3382 // from the stack. This also deals with cases where a local variable
3383 // named 'arguments' has been introduced.
3384 frame_->Dup();
3385 Result probe = frame_->Pop();
Leon Clarked91b9f72010-01-27 17:25:45 +00003386 { VirtualFrame::SpilledScope spilled_scope;
3387 Label slow, done;
3388 bool try_lazy = true;
3389 if (probe.is_constant()) {
3390 try_lazy = probe.handle()->IsTheHole();
3391 } else {
3392 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
3393 probe.Unuse();
3394 __ j(not_equal, &slow);
3395 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003396
Leon Clarked91b9f72010-01-27 17:25:45 +00003397 if (try_lazy) {
3398 Label build_args;
3399 // Get rid of the arguments object probe.
3400 frame_->Drop(); // Can be called on a spilled frame.
3401 // Stack now has 3 elements on it.
3402 // Contents of stack at this point:
3403 // esp[0]: receiver
3404 // esp[1]: applicand.apply
3405 // esp[2]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003406
Leon Clarked91b9f72010-01-27 17:25:45 +00003407 // Check that the receiver really is a JavaScript object.
3408 __ mov(eax, Operand(esp, 0));
3409 __ test(eax, Immediate(kSmiTagMask));
3410 __ j(zero, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003411 // We allow all JSObjects including JSFunctions. As long as
3412 // JS_FUNCTION_TYPE is the last instance type and it is right
3413 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
3414 // bound.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003415 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3416 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Leon Clarked91b9f72010-01-27 17:25:45 +00003417 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3418 __ j(below, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003419
Leon Clarked91b9f72010-01-27 17:25:45 +00003420 // Check that applicand.apply is Function.prototype.apply.
3421 __ mov(eax, Operand(esp, kPointerSize));
3422 __ test(eax, Immediate(kSmiTagMask));
3423 __ j(zero, &build_args);
3424 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
3425 __ j(not_equal, &build_args);
Steve Block791712a2010-08-27 10:21:07 +01003426 __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
3427 __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003428 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Steve Block791712a2010-08-27 10:21:07 +01003429 __ cmp(Operand(ecx), Immediate(apply_code));
Leon Clarked91b9f72010-01-27 17:25:45 +00003430 __ j(not_equal, &build_args);
3431
3432 // Check that applicand is a function.
3433 __ mov(edi, Operand(esp, 2 * kPointerSize));
3434 __ test(edi, Immediate(kSmiTagMask));
3435 __ j(zero, &build_args);
3436 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3437 __ j(not_equal, &build_args);
3438
3439 // Copy the arguments to this function possibly from the
3440 // adaptor frame below it.
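      // An arguments adaptor frame is inserted when the actual argument
      // count differs from the formal parameter count; it is recognized by
      // the ARGUMENTS_ADAPTOR sentinel stored in its context slot.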
3441 Label invoke, adapted;
3442 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3443 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
3444 __ cmp(Operand(ecx),
3445 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3446 __ j(equal, &adapted);
3447
3448 // No arguments adaptor frame. Copy fixed number of arguments.
Andrei Popescu31002712010-02-23 13:46:05 +00003449 __ mov(eax, Immediate(scope()->num_parameters()));
3450 for (int i = 0; i < scope()->num_parameters(); i++) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003451 __ push(frame_->ParameterAt(i));
3452 }
3453 __ jmp(&invoke);
3454
3455 // Arguments adaptor frame present. Copy arguments from there, but
3456 // avoid copying too many arguments to avoid stack overflows.
3457 __ bind(&adapted);
3458 static const uint32_t kArgumentsLimit = 1 * KB;
3459 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3460 __ SmiUntag(eax);
3461 __ mov(ecx, Operand(eax));
3462 __ cmp(eax, kArgumentsLimit);
3463 __ j(above, &build_args);
3464
3465 // Loop through the arguments pushing them onto the execution
3466 // stack. We don't inform the virtual frame of the push, so we don't
3467 // have to worry about getting rid of the elements from the virtual
3468 // frame.
3469 Label loop;
3470 // ecx is a small non-negative integer, due to the test above.
3471 __ test(ecx, Operand(ecx));
3472 __ j(zero, &invoke);
3473 __ bind(&loop);
3474 __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
3475 __ dec(ecx);
3476 __ j(not_zero, &loop);
3477
3478 // Invoke the function.
3479 __ bind(&invoke);
3480 ParameterCount actual(eax);
3481 __ InvokeFunction(edi, actual, CALL_FUNCTION);
3482 // Drop applicand.apply and applicand from the stack, and push
3483 // the result of the function call, but leave the spilled frame
3484 // unchanged, with 3 elements, so it is correct when we compile the
3485 // slow-case code.
3486 __ add(Operand(esp), Immediate(2 * kPointerSize));
3487 __ push(eax);
3488 // Stack now has 1 element:
3489 // esp[0]: result
3490 __ jmp(&done);
3491
3492 // Slow-case: Allocate the arguments object since we know it isn't
3493 // there, and fall through to the slow case where we call
3494 // applicand.apply.
3495 __ bind(&build_args);
3496 // Stack now has 3 elements, because we have jumped from where:
3497 // esp[0]: receiver
3498 // esp[1]: applicand.apply
3499 // esp[2]: applicand.
3500
3501 // StoreArgumentsObject requires a correct frame, and may modify it.
3502 Result arguments_object = StoreArgumentsObject(false);
3503 frame_->SpillAll();
3504 arguments_object.ToRegister();
3505 frame_->EmitPush(arguments_object.reg());
3506 arguments_object.Unuse();
3507 // Stack and frame now have 4 elements.
3508 __ bind(&slow);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003509 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003510
Leon Clarked91b9f72010-01-27 17:25:45 +00003511 // Generic computation of x.apply(y, args) with no special optimization.
3512 // Flip applicand.apply and applicand on the stack, so
3513 // applicand looks like the receiver of the applicand.apply call.
3514 // Then process it as a normal function call.
3515 __ mov(eax, Operand(esp, 3 * kPointerSize));
3516 __ mov(ebx, Operand(esp, 2 * kPointerSize));
3517 __ mov(Operand(esp, 2 * kPointerSize), eax);
3518 __ mov(Operand(esp, 3 * kPointerSize), ebx);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003519
Leon Clarked91b9f72010-01-27 17:25:45 +00003520 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
3521 Result res = frame_->CallStub(&call_function, 3);
3522 // The function and its two arguments have been dropped.
3523 frame_->Drop(1); // Drop the receiver as well.
3524 res.ToRegister();
3525 frame_->EmitPush(res.reg());
3526 // Stack now has 1 element:
3527 // esp[0]: result
3528 if (try_lazy) __ bind(&done);
3529 } // End of spilled scope.
3530 // Restore the context register after a call.
Steve Blocka7e24c12009-10-30 11:49:00 +00003531 frame_->RestoreContextRegister();
3532}
3533
3534
3535class DeferredStackCheck: public DeferredCode {
3536 public:
3537 DeferredStackCheck() {
3538 set_comment("[ DeferredStackCheck");
3539 }
3540
3541 virtual void Generate();
3542};
3543
3544
3545void DeferredStackCheck::Generate() {
3546 StackCheckStub stub;
3547 __ CallStub(&stub);
3548}
3549
3550
3551void CodeGenerator::CheckStack() {
Steve Blockd0582a62009-12-15 09:54:21 +00003552 DeferredStackCheck* deferred = new DeferredStackCheck;
3553 ExternalReference stack_limit =
3554 ExternalReference::address_of_stack_limit();
3555 __ cmp(esp, Operand::StaticVariable(stack_limit));
3556 deferred->Branch(below);
3557 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00003558}
3559
3560
3561void CodeGenerator::VisitAndSpill(Statement* statement) {
3562 ASSERT(in_spilled_code());
3563 set_in_spilled_code(false);
3564 Visit(statement);
3565 if (frame_ != NULL) {
3566 frame_->SpillAll();
3567 }
3568 set_in_spilled_code(true);
3569}
3570
3571
3572void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003573#ifdef DEBUG
3574 int original_height = frame_->height();
3575#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003576 ASSERT(in_spilled_code());
3577 set_in_spilled_code(false);
3578 VisitStatements(statements);
3579 if (frame_ != NULL) {
3580 frame_->SpillAll();
3581 }
3582 set_in_spilled_code(true);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003583
3584 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003585}
3586
3587
3588void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003589#ifdef DEBUG
3590 int original_height = frame_->height();
3591#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003592 ASSERT(!in_spilled_code());
3593 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
3594 Visit(statements->at(i));
3595 }
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003596 ASSERT(!has_valid_frame() || frame_->height() == original_height);
Steve Blocka7e24c12009-10-30 11:49:00 +00003597}
3598
3599
3600void CodeGenerator::VisitBlock(Block* node) {
3601 ASSERT(!in_spilled_code());
3602 Comment cmnt(masm_, "[ Block");
3603 CodeForStatementPosition(node);
3604 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3605 VisitStatements(node->statements());
3606 if (node->break_target()->is_linked()) {
3607 node->break_target()->Bind();
3608 }
3609 node->break_target()->Unuse();
3610}
3611
3612
3613void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
3614 // Call the runtime to declare the globals. The inevitable call
3615 // will sync frame elements to memory anyway, so we do it eagerly to
3616 // allow us to push the arguments directly into place.
3617 frame_->SyncRange(0, frame_->element_count() - 1);
3618
Steve Block3ce2e202009-11-05 08:53:23 +00003619 frame_->EmitPush(esi); // The context is the first argument.
Steve Blocka7e24c12009-10-30 11:49:00 +00003620 frame_->EmitPush(Immediate(pairs));
Steve Blocka7e24c12009-10-30 11:49:00 +00003621 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
3622 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
3623 // Return value is ignored.
3624}
3625
3626
3627void CodeGenerator::VisitDeclaration(Declaration* node) {
3628 Comment cmnt(masm_, "[ Declaration");
3629 Variable* var = node->proxy()->var();
3630 ASSERT(var != NULL); // must have been resolved
3631 Slot* slot = var->slot();
3632
3633 // If it was not possible to allocate the variable at compile time,
3634 // we need to "declare" it at runtime to make sure it actually
3635 // exists in the local context.
3636 if (slot != NULL && slot->type() == Slot::LOOKUP) {
3637 // Variables with a "LOOKUP" slot were introduced as non-locals
3638 // during variable resolution and must have mode DYNAMIC.
3639 ASSERT(var->is_dynamic());
3640 // For now, just do a runtime call. Sync the virtual frame eagerly
3641 // so we can simply push the arguments into place.
3642 frame_->SyncRange(0, frame_->element_count() - 1);
3643 frame_->EmitPush(esi);
3644 frame_->EmitPush(Immediate(var->name()));
3645 // Declaration nodes are always introduced in one of two modes.
3646 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
3647 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
3648 frame_->EmitPush(Immediate(Smi::FromInt(attr)));
3649 // Push initial value, if any.
3650 // Note: For variables we must not push an initial value (such as
3651 // 'undefined') because we may have a (legal) redeclaration and we
3652 // must not destroy the current value.
3653 if (node->mode() == Variable::CONST) {
3654 frame_->EmitPush(Immediate(Factory::the_hole_value()));
3655 } else if (node->fun() != NULL) {
3656 Load(node->fun());
3657 } else {
3658 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
3659 }
3660 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
3661 // Ignore the return value (declarations are statements).
3662 return;
3663 }
3664
3665 ASSERT(!var->is_global());
3666
3667 // If we have a function or a constant, we need to initialize the variable.
3668 Expression* val = NULL;
3669 if (node->mode() == Variable::CONST) {
3670 val = new Literal(Factory::the_hole_value());
3671 } else {
3672 val = node->fun(); // NULL if we don't have a function
3673 }
3674
3675 if (val != NULL) {
3676 {
3677 // Set the initial value.
3678 Reference target(this, node->proxy());
3679 Load(val);
3680 target.SetValue(NOT_CONST_INIT);
3681 // The reference is removed from the stack (preserving TOS) when
3682 // it goes out of scope.
3683 }
3684 // Get rid of the assigned value (declarations are statements).
3685 frame_->Drop();
3686 }
3687}
3688
3689
3690void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
3691 ASSERT(!in_spilled_code());
3692 Comment cmnt(masm_, "[ ExpressionStatement");
3693 CodeForStatementPosition(node);
3694 Expression* expression = node->expression();
3695 expression->MarkAsStatement();
3696 Load(expression);
3697 // Remove the lingering expression result from the top of stack.
3698 frame_->Drop();
3699}
3700
3701
3702void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
3703 ASSERT(!in_spilled_code());
3704 Comment cmnt(masm_, "// EmptyStatement");
3705 CodeForStatementPosition(node);
3706 // nothing to do
3707}
3708
3709
3710void CodeGenerator::VisitIfStatement(IfStatement* node) {
3711 ASSERT(!in_spilled_code());
3712 Comment cmnt(masm_, "[ IfStatement");
3713 // Generate different code depending on which parts of the if statement
3714 // are present or not.
3715 bool has_then_stm = node->HasThenStatement();
3716 bool has_else_stm = node->HasElseStatement();
3717
3718 CodeForStatementPosition(node);
3719 JumpTarget exit;
3720 if (has_then_stm && has_else_stm) {
3721 JumpTarget then;
3722 JumpTarget else_;
3723 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003724 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003725
3726 if (dest.false_was_fall_through()) {
3727 // The else target was bound, so we compile the else part first.
3728 Visit(node->else_statement());
3729
3730 // We may have dangling jumps to the then part.
3731 if (then.is_linked()) {
3732 if (has_valid_frame()) exit.Jump();
3733 then.Bind();
3734 Visit(node->then_statement());
3735 }
3736 } else {
3737 // The then target was bound, so we compile the then part first.
3738 Visit(node->then_statement());
3739
3740 if (else_.is_linked()) {
3741 if (has_valid_frame()) exit.Jump();
3742 else_.Bind();
3743 Visit(node->else_statement());
3744 }
3745 }
3746
3747 } else if (has_then_stm) {
3748 ASSERT(!has_else_stm);
3749 JumpTarget then;
3750 ControlDestination dest(&then, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003751 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003752
3753 if (dest.false_was_fall_through()) {
3754 // The exit label was bound. We may have dangling jumps to the
3755 // then part.
3756 if (then.is_linked()) {
3757 exit.Unuse();
3758 exit.Jump();
3759 then.Bind();
3760 Visit(node->then_statement());
3761 }
3762 } else {
3763 // The then label was bound.
3764 Visit(node->then_statement());
3765 }
3766
3767 } else if (has_else_stm) {
3768 ASSERT(!has_then_stm);
3769 JumpTarget else_;
3770 ControlDestination dest(&exit, &else_, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003771 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003772
3773 if (dest.true_was_fall_through()) {
3774 // The exit label was bound. We may have dangling jumps to the
3775 // else part.
3776 if (else_.is_linked()) {
3777 exit.Unuse();
3778 exit.Jump();
3779 else_.Bind();
3780 Visit(node->else_statement());
3781 }
3782 } else {
3783 // The else label was bound.
3784 Visit(node->else_statement());
3785 }
3786
3787 } else {
3788 ASSERT(!has_then_stm && !has_else_stm);
3789 // We only care about the condition's side effects (not its value
3790 // or control flow effect). LoadCondition is called without
3791 // forcing control flow.
3792 ControlDestination dest(&exit, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003793 LoadCondition(node->condition(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003794 if (!dest.is_used()) {
3795 // We got a value on the frame rather than (or in addition to)
3796 // control flow.
3797 frame_->Drop();
3798 }
3799 }
3800
3801 if (exit.is_linked()) {
3802 exit.Bind();
3803 }
3804}
3805
3806
3807void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
3808 ASSERT(!in_spilled_code());
3809 Comment cmnt(masm_, "[ ContinueStatement");
3810 CodeForStatementPosition(node);
3811 node->target()->continue_target()->Jump();
3812}
3813
3814
3815void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
3816 ASSERT(!in_spilled_code());
3817 Comment cmnt(masm_, "[ BreakStatement");
3818 CodeForStatementPosition(node);
3819 node->target()->break_target()->Jump();
3820}
3821
3822
3823void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
3824 ASSERT(!in_spilled_code());
3825 Comment cmnt(masm_, "[ ReturnStatement");
3826
3827 CodeForStatementPosition(node);
3828 Load(node->expression());
3829 Result return_value = frame_->Pop();
Steve Blockd0582a62009-12-15 09:54:21 +00003830 masm()->WriteRecordedPositions();
Steve Blocka7e24c12009-10-30 11:49:00 +00003831 if (function_return_is_shadowed_) {
3832 function_return_.Jump(&return_value);
3833 } else {
3834 frame_->PrepareForReturn();
3835 if (function_return_.is_bound()) {
3836 // If the function return label is already bound we reuse the
3837 // code by jumping to the return site.
3838 function_return_.Jump(&return_value);
3839 } else {
3840 function_return_.Bind(&return_value);
3841 GenerateReturnSequence(&return_value);
3842 }
3843 }
3844}
3845
3846
3847void CodeGenerator::GenerateReturnSequence(Result* return_value) {
3848 // The return value is a live (but not currently reference counted)
3849 // reference to eax. This is safe because the current frame does not
3850 // contain a reference to eax (it is prepared for the return by spilling
3851 // all registers).
3852 if (FLAG_trace) {
3853 frame_->Push(return_value);
3854 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
3855 }
3856 return_value->ToRegister(eax);
3857
3858 // Add a label for checking the size of the code used for returning.
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003859#ifdef DEBUG
Steve Blocka7e24c12009-10-30 11:49:00 +00003860 Label check_exit_codesize;
3861 masm_->bind(&check_exit_codesize);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01003862#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003863
3864 // Leave the frame and return popping the arguments and the
3865 // receiver.
3866 frame_->Exit();
Andrei Popescu31002712010-02-23 13:46:05 +00003867 masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00003868 DeleteFrame();
3869
3870#ifdef ENABLE_DEBUGGER_SUPPORT
3871 // Check that the size of the code used for returning matches what is
3872 // expected by the debugger.
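  // (The debugger overwrites this sequence with a call instruction when a
  // break point is set at the return site, so its length must be fixed.)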
Steve Blockd0582a62009-12-15 09:54:21 +00003873 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
Steve Blocka7e24c12009-10-30 11:49:00 +00003874 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
3875#endif
3876}
3877
3878
3879void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
3880 ASSERT(!in_spilled_code());
3881 Comment cmnt(masm_, "[ WithEnterStatement");
3882 CodeForStatementPosition(node);
3883 Load(node->expression());
3884 Result context;
3885 if (node->is_catch_block()) {
3886 context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
3887 } else {
3888 context = frame_->CallRuntime(Runtime::kPushContext, 1);
3889 }
3890
3891 // Update context local.
3892 frame_->SaveContextRegister();
3893
3894 // Verify that the runtime call result and esi agree.
3895 if (FLAG_debug_code) {
3896 __ cmp(context.reg(), Operand(esi));
3897 __ Assert(equal, "Runtime::NewContext should end up in esi");
3898 }
3899}
3900
3901
3902void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
3903 ASSERT(!in_spilled_code());
3904 Comment cmnt(masm_, "[ WithExitStatement");
3905 CodeForStatementPosition(node);
3906 // Pop context.
3907 __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
3908 // Update context local.
3909 frame_->SaveContextRegister();
3910}
3911
3912
3913void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
3914 ASSERT(!in_spilled_code());
3915 Comment cmnt(masm_, "[ SwitchStatement");
3916 CodeForStatementPosition(node);
3917 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3918
3919 // Compile the switch value.
3920 Load(node->tag());
3921
3922 ZoneList<CaseClause*>* cases = node->cases();
3923 int length = cases->length();
3924 CaseClause* default_clause = NULL;
3925
3926 JumpTarget next_test;
3927 // Compile the case label expressions and comparisons. Exit early
3928 // if a comparison is unconditionally true. The target next_test is
3929 // bound before the loop in order to indicate control flow to the
3930 // first comparison.
3931 next_test.Bind();
3932 for (int i = 0; i < length && !next_test.is_unused(); i++) {
3933 CaseClause* clause = cases->at(i);
3934 // The default is not a test, but remember it for later.
3935 if (clause->is_default()) {
3936 default_clause = clause;
3937 continue;
3938 }
3939
3940 Comment cmnt(masm_, "[ Case comparison");
3941 // We recycle the same target next_test for each test. Bind it if
3942 // the previous test has not done so and then unuse it for the
3943 // loop.
3944 if (next_test.is_linked()) {
3945 next_test.Bind();
3946 }
3947 next_test.Unuse();
3948
3949 // Duplicate the switch value.
3950 frame_->Dup();
3951
3952 // Compile the label expression.
3953 Load(clause->label());
3954
3955 // Compare and branch to the body if true or the next test if
3956 // false. Prefer the next test as a fall through.
3957 ControlDestination dest(clause->body_target(), &next_test, false);
Leon Clarkee46be812010-01-19 14:06:41 +00003958 Comparison(node, equal, true, &dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00003959
3960 // If the comparison fell through to the true target, jump to the
3961 // actual body.
3962 if (dest.true_was_fall_through()) {
3963 clause->body_target()->Unuse();
3964 clause->body_target()->Jump();
3965 }
3966 }
3967
3968 // If there was control flow to a next test from the last one
3969 // compiled, compile a jump to the default or break target.
3970 if (!next_test.is_unused()) {
3971 if (next_test.is_linked()) {
3972 next_test.Bind();
3973 }
3974 // Drop the switch value.
3975 frame_->Drop();
3976 if (default_clause != NULL) {
3977 default_clause->body_target()->Jump();
3978 } else {
3979 node->break_target()->Jump();
3980 }
3981 }
3982
Steve Blocka7e24c12009-10-30 11:49:00 +00003983 // The last instruction emitted was a jump, either to the default
3984 // clause or the break target, or else to a case body from the loop
3985 // that compiles the tests.
3986 ASSERT(!has_valid_frame());
3987 // Compile case bodies as needed.
3988 for (int i = 0; i < length; i++) {
3989 CaseClause* clause = cases->at(i);
3990
3991 // There are two ways to reach the body: from the corresponding
3992 // test or as the fall through of the previous body.
3993 if (clause->body_target()->is_linked() || has_valid_frame()) {
3994 if (clause->body_target()->is_linked()) {
3995 if (has_valid_frame()) {
3996 // If we have both a jump to the test and a fall through, put
3997 // a jump on the fall through path to avoid the dropping of
3998 // the switch value on the test path. The exception is the
3999 // default which has already had the switch value dropped.
4000 if (clause->is_default()) {
4001 clause->body_target()->Bind();
4002 } else {
4003 JumpTarget body;
4004 body.Jump();
4005 clause->body_target()->Bind();
4006 frame_->Drop();
4007 body.Bind();
4008 }
4009 } else {
4010 // No fall through to worry about.
4011 clause->body_target()->Bind();
4012 if (!clause->is_default()) {
4013 frame_->Drop();
4014 }
4015 }
4016 } else {
4017 // Otherwise, we have only fall through.
4018 ASSERT(has_valid_frame());
4019 }
4020
4021 // We are now prepared to compile the body.
4022 Comment cmnt(masm_, "[ Case body");
4023 VisitStatements(clause->statements());
4024 }
4025 clause->body_target()->Unuse();
4026 }
4027
4028 // We may not have a valid frame here so bind the break target only
4029 // if needed.
4030 if (node->break_target()->is_linked()) {
4031 node->break_target()->Bind();
4032 }
4033 node->break_target()->Unuse();
4034}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  ConditionAnalysis info = AnalyzeCondition(node->cond());
  // Label the top of the loop for the backward jump if necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // Use the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      // No need to label it.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      break;
    case DONT_KNOW:
      // Continue is the test, so use the backward body target.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      body.Bind();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control flow can fall off the end of the body, jump back
      // to the top and bind the break target at the exit.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case ALWAYS_FALSE:
      // We may have had continues or breaks in the body.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        Comment cmnt(masm_, "[ DoWhileCondition");
        CodeForDoWhileConditionPosition(node);
        ControlDestination dest(&body, node->break_target(), false);
        LoadCondition(node->cond(), &dest, true);
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
  }

  DecrementLoopNesting();
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}
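
// Added commentary (not in the original file): AnalyzeCondition lets the
// switches above specialize the emitted code. For example, a loop like
// `do { f(); } while (true)` (ALWAYS_TRUE) emits no test at all, only a
// backward jump, whereas `do { f(); } while (g())` (DONT_KNOW) emits the
// test once, at the bottom, with the body as the backward jump target.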


void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WhileStatement");
  CodeForStatementPosition(node);

  // If the condition is always false and has no side effects, we do not
  // need to compile anything.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions. This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop with the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is the test at the bottom, no need to label the test
        // at the top. The body is a backward target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else {
        // Label the test at the top as the continue target. The body
        // is a forward-only target.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // The loop body has been labeled with the continue target.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        // If we have chosen to recompile the test at the bottom,
        // then it is the continue target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here and thus an invalid fall-through).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // If we have chosen not to recompile the test at the bottom,
        // jump back to the one at the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}
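
// Added commentary (not in the original file): for a loop such as
// `while (i < n) { i++; }` the condition is compiled twice, once at the
// top and again at the bottom (test_at_bottom), so each iteration needs
// only one conditional branch. A condition containing a function literal,
// e.g. `while (pred(function() {})) { ... }`, suppresses the duplication
// so that the literal is not compiled twice.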


void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
  ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
  if (slot->type() == Slot::LOCAL) {
    frame_->SetTypeForLocalAt(slot->index(), info);
  } else {
    frame_->SetTypeForParamAt(slot->index(), info);
  }
  if (FLAG_debug_code && info.IsSmi()) {
    if (slot->type() == Slot::LOCAL) {
      frame_->PushLocalAt(slot->index());
    } else {
      frame_->PushParameterAt(slot->index());
    }
    Result var = frame_->Pop();
    var.ToRegister();
    __ AbortIfNotSmi(var.reg());
  }
}
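
// Added commentary (not in the original file): under --debug-code the
// helper above also materializes the slot and calls AbortIfNotSmi, so
// wrongly recording TypeInfo::Smi() for a slot that holds a heap object
// aborts immediately instead of miscompiling later smi-specialized code.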


void CodeGenerator::VisitForStatement(ForStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);

  // Compile the init expression if present.
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the condition is always false and has no side effects, we do not
  // need to compile anything else.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions. This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();

  // Target for backward edge if no test at the bottom, otherwise
  // unused.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);

  // Target for backward edge if there is a test at the bottom,
  // otherwise used as target for test at the top.
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression. Label the top of the
      // loop.
      if (node->next() == NULL) {
        // Use the continue target if there is no update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // Otherwise use the backward loop target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is either the update expression or the test at the
        // bottom, no need to label the test at the top.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else if (node->next() == NULL) {
        // We are not recompiling the test at the bottom and there is no
        // update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // We are not recompiling the test at the bottom and there is an
        // update expression.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }

      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition. In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
  if (node->is_fast_smi_loop()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
  }

  Visit(node->body());

  // If there is an update expression, compile it if necessary.
  if (node->next() != NULL) {
    if (node->continue_target()->is_linked()) {
      node->continue_target()->Bind();
    }

    // Control can reach the update by falling out of the body or by a
    // continue.
    if (has_valid_frame()) {
      // Record the source position of the statement as this code which
      // is after the code for the body actually belongs to the loop
      // statement and not the body.
      CodeForStatementPosition(node);
      Visit(node->next());
    }
  }

  // Set the type of the loop variable to smi before compiling the test
  // expression if we are in a fast smi loop condition.
  if (node->is_fast_smi_loop() && has_valid_frame()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
  }

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      if (has_valid_frame()) {
        if (node->next() == NULL) {
          node->continue_target()->Jump();
        } else {
          loop.Jump();
        }
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        if (node->continue_target()->is_linked()) {
          // We can have dangling jumps to the continue target if there
          // was no update expression.
          node->continue_target()->Bind();
        }
        // Control can reach the test at the bottom by falling out of
        // the body, by a continue in the body, or from the update
        // expression.
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // Otherwise, jump back to the test at the top.
        if (has_valid_frame()) {
          if (node->next() == NULL) {
            node->continue_target()->Jump();
          } else {
            loop.Jump();
          }
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame. Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}
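
// Added commentary (not in the original file): a loop such as
//
//   for (var i = 0; i < 100; i++) { sum += i; }
//
// is a fast smi loop in the sense used above: `i` is not modified in the
// body and is checked against a constant limit, so its static type can be
// reset to smi before the body, the update expression, and the bottom
// test are compiled.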


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive;
  JumpTarget jsobject;
  JumpTarget fixed_array;
  JumpTarget entry(JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check;
  JumpTarget exit;

  // Get the object to enumerate over (converted to JSObject).
  LoadAndSpill(node->enumerable());

  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification; section 12.6.4 mandates a call to ToObject.
  frame_->EmitPop(eax);

  // eax: value to be iterated over
  __ cmp(eax, Factory::undefined_value());
  exit.Branch(equal);
  __ cmp(eax, Factory::null_value());
  exit.Branch(equal);

  // Stack layout in body:
  // [iteration counter (smi)] <- slot 0
  // [length of array] <- slot 1
  // [FixedArray] <- slot 2
  // [Map or 0] <- slot 3
  // [Object] <- slot 4

  // Check if enumerable is already a JSObject.
  // eax: value to be iterated over
  __ test(eax, Immediate(kSmiTagMask));
  primitive.Branch(zero);
  __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
  jsobject.Branch(above_equal);

  primitive.Bind();
  frame_->EmitPush(eax);
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
  // The builtin call returns its value in eax, which is where we want it
  // below.

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // eax: value to be iterated over
  frame_->EmitPush(eax);  // Push the object being iterated over.

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(ecx, eax);
  loop.Bind();
  // Check that there are no elements.
  __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache. Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
  call_runtime.Branch(equal);
  // Check that there is an enum cache in the non-empty instance
  // descriptors. This is the case if the next enumeration index
  // field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  call_runtime.Branch(zero);
  // For all objects but the receiver, check that the cache is empty.
  __ cmp(ecx, Operand(eax));
  check_prototype.Branch(equal);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  loop.Branch(not_equal);
  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(eax);  // push the Object (slot 4) for the runtime call
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // eax: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(edx, Operand(eax));
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, Factory::meta_map());
  fixed_array.Branch(not_equal);

  use_cache.Bind();
  // Get enum cache
  // eax: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(ecx, Operand(eax));

  __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
  // Get the bridge array held in the enumeration index field.
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  // Get the cache from the bridge array.
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(eax);  // <- slot 3
  frame_->EmitPush(edx);  // <- slot 2
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
  entry.Jump();

  fixed_array.Bind();
  // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 3
  frame_->EmitPush(eax);  // <- slot 2

  // Push the length of the array and the initial index onto the stack.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0

  // Condition.
  entry.Bind();
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);

  __ mov(eax, frame_->ElementAt(0));  // load the current count
  __ cmp(eax, frame_->ElementAt(1));  // compare to the array length
  node->break_target()->Branch(above_equal);

  // Get the i'th entry of the array.
  __ mov(edx, frame_->ElementAt(2));
  __ mov(ebx, FixedArrayElementOperand(edx, eax));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  __ mov(edx, frame_->ElementAt(3));
  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  // edx: expected map value
  __ mov(ecx, frame_->ElementAt(4));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, Operand(edx));
  end_del_check.Branch(equal);

  // Convert the entry to a string (or null if it isn't a property anymore).
  frame_->EmitPush(frame_->ElementAt(4));  // push enumerable
  frame_->EmitPush(ebx);  // push entry
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
  __ mov(ebx, Operand(eax));

  // If the property has been removed while iterating, we just skip it.
  __ test(ebx, Operand(ebx));
  node->continue_target()->Branch(equal);

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop. ebx: i'th entry of the enum cache (or string thereof)
  frame_->EmitPush(ebx);
  { Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        // Loading a reference may leave the frame in an unspilled state.
        frame_->SpillAll();
        // Get the value (under the reference on the stack) from memory.
        frame_->EmitPush(frame_->ElementAt(each.size()));
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop(2);
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (e.g., ebx pushed above) is
        // right on top of or right underneath a zero-sized reference.
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop();
      }
    }
  }
  // Unloading a reference may leave the frame in an unspilled state.
  frame_->SpillAll();

  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  VisitAndSpill(node->body());

  // Next. Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  frame_->EmitPop(eax);
  __ add(Operand(eax), Immediate(Smi::FromInt(1)));
  frame_->EmitPush(eax);
  entry.Jump();

  // Cleanup. No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();

  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}
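
// Added commentary (not in the original file): for code like
//
//   for (var key in obj) { use(key); }
//
// the five stack slots documented above drive the loop: slot 0 holds the
// smi iteration index, slot 1 the cache length, slot 2 the FixedArray of
// names, slot 3 the expected map (or smi 0 in the slow case), and slot 4
// the object itself. The FILTER_KEY builtin screens out keys that were
// deleted while the iteration was in progress.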


void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(eax);

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->slot() != NULL);
  StoreToSlot(catch_var->slot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  VisitStatementsAndSpill(node->catch_block()->statements());
  if (has_valid_frame()) {
    exit.Jump();
  }

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // Make sure that there's nothing left on the stack above the
  // handler structure.
  if (FLAG_debug_code) {
    __ mov(eax, Operand::StaticVariable(handler_address));
    __ cmp(esp, Operand(eax));
    __ Assert(equal, "stack pointer should point to top handler");
  }

  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame. Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing targets that
  // have been jumped to. Deallocate each shadow target.
  Result return_value;
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain; be careful not to destroy the TOS if
      // there is one.
      if (i == kReturnShadowIndex) {
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (e.g., for...in) may have left stuff on the stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        if (!function_return_is_shadowed_) frame_->PrepareForReturn();
        shadows[i]->other_target()->Jump(&return_value);
      } else {
        shadows[i]->other_target()->Jump();
      }
    }
  }

  exit.Bind();
}
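
// Added commentary (not in the original file): the shadow targets above
// exist because statements such as
//
//   try { return f(); } catch (e) { log(e); }
//
// must not leave the try handler linked when control escapes the try
// block. The shadow intercepts the jump, unlinks the handler (and, for
// return, preserves the return value in eax), and only then forwards
// control to the original target.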


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(eax);
  // In case of thrown exceptions, this is where we continue.
  __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in ecx, then jump around the unlink blocks if any.
    frame_->EmitPush(Immediate(Factory::undefined_value()));
    __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // on the virtual frame. We must preserve it until it is
      // pushed.
      if (i == kReturnShadowIndex) {
        Result return_value;
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

4891 // we break from (eg, for...in) may have left stuff on the
4892 // stack.
4893 __ mov(esp, Operand::StaticVariable(handler_address));
4894 frame_->Forget(frame_->height() - handler_height);
4895
4896 // Unlink this handler and drop it from the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01004897 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004898 frame_->EmitPop(Operand::StaticVariable(handler_address));
4899 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
4900
4901 if (i == kReturnShadowIndex) {
4902 // If this target shadowed the function return, materialize
4903 // the return value on the stack.
4904 frame_->EmitPush(eax);
4905 } else {
4906 // Fake TOS for targets that shadowed breaks and continues.
4907 frame_->EmitPush(Immediate(Factory::undefined_value()));
4908 }
4909 __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
4910 if (--nof_unlinks > 0) {
4911 // If this is not the last unlink block, jump around the next.
4912 finally_block.Jump();
4913 }
4914 }
4915 }
4916
4917 // --- Finally block ---
4918 finally_block.Bind();
4919
4920 // Push the state on the stack.
4921 frame_->EmitPush(ecx);
4922
4923 // We keep two elements on the stack - the (possibly faked) result
4924 // and the state - while evaluating the finally block.
4925 //
4926 // Generate code for the statements in the finally block.
4927 VisitStatementsAndSpill(node->finally_block()->statements());
4928
4929 if (has_valid_frame()) {
4930 // Restore state and return value or faked TOS.
4931 frame_->EmitPop(ecx);
4932 frame_->EmitPop(eax);
4933 }
4934
4935 // Generate code to jump to the right destination for all used
4936 // formerly shadowing targets. Deallocate each shadow target.
4937 for (int i = 0; i < shadows.length(); i++) {
4938 if (has_valid_frame() && shadows[i]->is_bound()) {
4939 BreakTarget* original = shadows[i]->other_target();
4940 __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
4941 if (i == kReturnShadowIndex) {
4942 // The return value is (already) in eax.
4943 Result return_value = allocator_->Allocate(eax);
4944 ASSERT(return_value.is_valid());
4945 if (function_return_is_shadowed_) {
4946 original->Branch(equal, &return_value);
4947 } else {
4948 // Branch around the preparation for return which may emit
4949 // code.
4950 JumpTarget skip;
4951 skip.Branch(not_equal);
4952 frame_->PrepareForReturn();
4953 original->Jump(&return_value);
4954 skip.Bind();
4955 }
4956 } else {
4957 original->Branch(equal);
4958 }
4959 }
4960 }
4961
4962 if (has_valid_frame()) {
4963 // Check if we need to rethrow the exception.
4964 JumpTarget exit;
4965 __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
4966 exit.Branch(not_equal);
4967
4968 // Rethrow exception.
4969 frame_->EmitPush(eax); // undo pop from above
4970 frame_->CallRuntime(Runtime::kReThrow, 1);
4971
4972 // Done.
4973 exit.Bind();
4974 }
4975}
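
// Added commentary (not in the original file): the FALLING, THROWING and
// JUMPING + i states let a single compiled finally block serve every way
// of leaving the try block. For instance, in
//
//   try { if (done) break; } finally { release(); }
//
// the break is routed through its shadow target, which records a JUMPING
// state in ecx before entering the finally code; afterwards the dispatch
// on ecx resumes the break, while a THROWING state leads to a rethrow
// via Runtime::kReThrow.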


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Spill everything, even constants, to the frame.
  frame_->SpillAll();

  frame_->DebugBreak();
  // Ignore the return value.
#endif
}


Result CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info) {
  // The inevitable call will sync frame elements to memory anyway, so
  // we do it eagerly to allow us to push the arguments directly into
  // place.
  frame()->SyncRange(0, frame()->element_count() - 1);

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (scope()->is_function_scope() && function_info->num_literals() == 0) {
    FastNewClosureStub stub;
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallStub(&stub, 1);
  } else {
    // Call the runtime to instantiate the function based on the
    // shared function info.
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallRuntime(Runtime::kNewClosure, 2);
  }
}
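
// Added commentary (not in the original file): a nested function whose
// body contains no literals, e.g.
//
//   function outer() { return function(x) { return x + 1; }; }
//
// takes the FastNewClosureStub path above, while a function that needs
// its literals array cloned is instantiated through Runtime::kNewClosure.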


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  Comment cmnt(masm_, "[ FunctionLiteral");
  ASSERT(!in_safe_int32_mode());
  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script(), this);
  // Check for stack-overflow exception.
  if (HasStackOverflow()) return;
  Result result = InstantiateFunction(function_info);
  frame()->Push(&result);
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  Result result = InstantiateFunction(node->shared_function_info());
  frame()->Push(&result);
}


void CodeGenerator::VisitConditional(Conditional* node) {
  Comment cmnt(masm_, "[ Conditional");
  ASSERT(!in_safe_int32_mode());
  JumpTarget then;
  JumpTarget else_;
  JumpTarget exit;
  ControlDestination dest(&then, &else_, true);
  LoadCondition(node->condition(), &dest, true);

  if (dest.false_was_fall_through()) {
    // The else target was bound, so we compile the else part first.
    Load(node->else_expression());

    if (then.is_linked()) {
      exit.Jump();
      then.Bind();
      Load(node->then_expression());
    }
  } else {
    // The then target was bound, so we compile the then part first.
    Load(node->then_expression());

    if (else_.is_linked()) {
      exit.Jump();
      else_.Bind();
      Load(node->else_expression());
    }
  }

  exit.Bind();
}
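
// Added commentary (not in the original file): for an expression such as
// `cond ? a : b`, LoadCondition leaves one arm as the fall-through, so
// only the other arm needs a jump around it via the exit target. If the
// condition turns out to be known at compile time, the dead arm's target
// is never linked and its code is never emitted.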


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());
    JumpTarget slow;
    JumpTarget done;
    Result value;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &value,
                                    &slow,
                                    &done);

    slow.Bind();
    // A runtime call is inevitable. We eagerly sync frame elements
    // to memory so that we can push the arguments directly into place
    // on top of the frame.
    frame()->SyncRange(0, frame()->element_count() - 1);
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(slot->var()->name()));
    if (typeof_state == INSIDE_TYPEOF) {
      value =
          frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      value = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind(&value);
    frame_->Push(&value);

  } else if (slot->var()->mode() == Variable::CONST) {
    // Const slots may contain 'the hole' value (the constant hasn't been
    // initialized yet) which needs to be converted into the 'undefined'
    // value.
    //
    // We currently spill the virtual frame because constants use the
    // potentially unsafe direct-frame access of SlotOperand.
    VirtualFrame::SpilledScope spilled_scope;
    Comment cmnt(masm_, "[ Load const");
    Label exit;
    __ mov(ecx, SlotOperand(slot, ecx));
    __ cmp(ecx, Factory::the_hole_value());
    __ j(not_equal, &exit);
    __ mov(ecx, Factory::undefined_value());
    __ bind(&exit);
    frame()->EmitPush(ecx);

  } else if (slot->type() == Slot::PARAMETER) {
    frame()->PushParameterAt(slot->index());

  } else if (slot->type() == Slot::LOCAL) {
    frame()->PushLocalAt(slot->index());

  } else {
    // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
    // here.
    //
    // The use of SlotOperand below is safe for an unspilled frame
    // because it will always be a context slot.
    ASSERT(slot->type() == Slot::CONTEXT);
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
    frame()->Push(&temp);
  }
}
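
// Added commentary (not in the original file): the CONST case above keeps
// the uninitialized marker from leaking to JavaScript. Roughly,
//
//   if (false) { const x = 1; }
//   print(x);  // undefined
//
// reads the const slot while it still contains 'the hole', which the
// emitted code converts to 'undefined'.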


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // If the loaded value is a constant, we know if the arguments
  // object has been lazily loaded yet.
  Result result = frame()->Pop();
  if (result.is_constant()) {
    if (result.handle()->IsTheHole()) {
      result = StoreArgumentsObject(false);
    }
    frame()->Push(&result);
    return;
  }
  ASSERT(result.is_register());
  // The loaded value is in a register. If it is the sentinel that
  // indicates that we haven't loaded the arguments object yet, we
  // need to do it now.
  JumpTarget exit;
  __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
  frame()->Push(&result);
  exit.Branch(not_equal);

  result = StoreArgumentsObject(false);
  frame()->SetElementAt(0, &result);
  result.Unuse();
  exit.Bind();
  return;
}
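
// Added commentary (not in the original file): with lazy arguments
// allocation, a function that merely reads `arguments`, e.g.
// `function f() { return arguments.length; }`, defers building the
// arguments object until this first load; the slot holds 'the hole' as a
// sentinel and StoreArgumentsObject materializes the object on demand.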


Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    JumpTarget* slow) {
  ASSERT(!in_safe_int32_mode());
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
  Register context = esi;
  Result tmp = allocator_->Allocate();
  ASSERT(tmp.is_valid());  // All non-reserved registers were available.

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      // Load next context in chain.
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(tmp.reg())) {
      __ mov(tmp.reg(), context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
    slow->Branch(not_equal, not_taken);
    // Load next context in chain.
    __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
    __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }
  tmp.Unuse();

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  // The register allocator prefers eax if it is free, so the code generator
  // will load the global object directly into eax, which is where the LoadIC
  // expects it.
  frame_->Spill(eax);
  LoadGlobal();
  frame_->Push(slot->var()->name());
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Result answer = frame_->CallLoadIC(mode);
  // A test eax instruction following the call signals that the inobject
  // property case was inlined. Ensure that there is not a test eax
  // instruction here.
  __ nop();
  return answer;
}
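
// Added commentary (not in the original file): the emitted walk above
// branches to the slow path at runtime whenever a scope on the chain has
// acquired a context extension object, as happens once code such as
//
//   function f() { eval("var x = 1"); return x; }
//
// runs; if every extension slot is still NULL, the global load IC can be
// used directly.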


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    Result* result,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    done->Jump(result);

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      // Allocate a fresh register to use as a temp in
      // ContextSlotOperandCheckExtensions and to hold the result
      // value.
      *result = allocator()->Allocate();
      ASSERT(result->is_valid());
      __ mov(result->reg(),
             ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ cmp(result->reg(), Factory::the_hole_value());
        done->Branch(not_equal, result);
        __ mov(result->reg(), Factory::undefined_value());
      }
      done->Jump(result);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          Result arguments = allocator()->Allocate();
          ASSERT(arguments.is_valid());
          __ mov(arguments.reg(),
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
                                                   arguments,
                                                   slow));
          frame_->Push(&arguments);
          frame_->Push(key_literal->handle());
          *result = EmitKeyedLoad();
          done->Jump(result);
        }
      }
    }
  }
}


void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call. Since the call is inevitable,
    // we eagerly sync the virtual frame so we can directly push the
    // arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);

    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(slot->var()->name()));

    Result value;
    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores attribute
      // (e.g. READ_ONLY) of context slot so that we can initialize const
      // properties (introduced via eval("const foo = (some expr);")). Also,
      // uses the current function context instead of the top context.
      //
      // Note that we must declare the foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the same
      // time, because the const declaration may be at the end of the eval
      // code (sigh...) and the const variable may have been used before
      // (where its value is 'undefined'). Thus, we can only do the
      // initialization when we actually encounter the expression and when
      // the expression operands are defined and valid, and thus we need the
      // split into 2 operations: declaration of the context slot followed
      // by initialization.
      value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack. This is necessary for compiling chained assignment
    // expressions.
    frame_->Push(&value);

  } else {
    ASSERT(!slot->var()->is_dynamic());

    JumpTarget exit;
    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value). When the assignment is executed,
      // the code is identical to a normal store (see below).
      //
      // We spill the frame in the code below because the direct-frame
      // access of SlotOperand is potentially unsafe with an unspilled
      // frame.
      VirtualFrame::SpilledScope spilled_scope;
      Comment cmnt(masm_, "[ Init const");
      __ mov(ecx, SlotOperand(slot, ecx));
      __ cmp(ecx, Factory::the_hole_value());
      exit.Branch(not_equal);
    }

    // We must execute the store. Storing a variable must keep the (new)
    // value on the stack. This is necessary for compiling assignment
    // expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will initialize
    // consts to 'the hole' value and by doing so, end up calling this code.
    if (slot->type() == Slot::PARAMETER) {
      frame_->StoreToParameterAt(slot->index());
    } else if (slot->type() == Slot::LOCAL) {
      frame_->StoreToLocalAt(slot->index());
    } else {
      // The other slot types (LOOKUP and GLOBAL) cannot reach here.
      //
      // The use of SlotOperand below is safe for an unspilled frame
      // because the slot is a context slot.
      ASSERT(slot->type() == Slot::CONTEXT);
      frame_->Dup();
      Result value = frame_->Pop();
      value.ToRegister();
      Result start = allocator_->Allocate();
      ASSERT(start.is_valid());
      __ mov(SlotOperand(slot, start.reg()), value.reg());
      // RecordWrite may destroy the value registers.
      //
      // TODO(204): Avoid actually spilling when the value is not
      // needed (probably the common case).
      frame_->Spill(value.reg());
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
      // The results start, value, and temp are unused by going out of
      // scope.
    }

    exit.Bind();
  }
}
5411
5412
void CodeGenerator::VisitSlot(Slot* slot) {
  Comment cmnt(masm_, "[ Slot");
  if (in_safe_int32_mode()) {
    if (slot->type() == Slot::LOCAL && !slot->is_arguments()) {
      frame()->UntaggedPushLocalAt(slot->index());
    } else if (slot->type() == Slot::PARAMETER) {
      frame()->UntaggedPushParameterAt(slot->index());
    } else {
      UNREACHABLE();
    }
  } else {
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
  }
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
  Comment cmnt(masm_, "[ VariableProxy");
  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    ASSERT(!in_safe_int32_mode());
    Reference ref(this, node);
    ref.GetValue();
  }
}


void CodeGenerator::VisitLiteral(Literal* node) {
  Comment cmnt(masm_, "[ Literal");
  if (in_safe_int32_mode()) {
    frame_->PushUntaggedElement(node->handle());
  } else {
    frame_->Push(node->handle());
  }
}


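// The helpers below materialize "unsafe" smis: smi constants with more
// significant bits than kMaxSmiInlinedBits (see IsUnsafeSmi below).
// Emitting such a constant as a single 32-bit immediate would embed a
// script-chosen bit pattern verbatim in executable memory (a JIT-spraying
// concern), so the value is written in two halves instead -- the low 16
// bits first, with the high 16 bits OR'ed in afterwards.  For a smi with
// raw bits 0xdeadbee0, for example, PushUnsafeSmi emits roughly:
//   push 0x0000bee0
//   or   dword ptr [esp], 0xdead0000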
void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ push(Immediate(bits & 0x0000FFFF));
  __ or_(Operand(esp, 0), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ mov(Operand(ebp, offset), Immediate(bits & 0x0000FFFF));
  __ or_(Operand(ebp, offset), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
  ASSERT(target.is_valid());
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ Set(target, Immediate(bits & 0x0000FFFF));
  __ or_(target, bits & 0xFFFF0000);
}


bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
  if (!value->IsSmi()) return false;
  int int_value = Smi::cast(*value)->value();
  return !is_intn(int_value, kMaxSmiInlinedBits);
}


// Materialize the regexp literal 'node' in the literals array
// 'literals' of the function.  Leave the regexp boilerplate in
// 'boilerplate'.
class DeferredRegExpLiteral: public DeferredCode {
 public:
  DeferredRegExpLiteral(Register boilerplate,
                        Register literals,
                        RegExpLiteral* node)
      : boilerplate_(boilerplate), literals_(literals), node_(node) {
    set_comment("[ DeferredRegExpLiteral");
  }

  void Generate();

 private:
  Register boilerplate_;
  Register literals_;
  RegExpLiteral* node_;
};


void DeferredRegExpLiteral::Generate() {
  // Since the entry is undefined we call the runtime system to
  // compute the literal.
  // Literal array (0).
  __ push(literals_);
  // Literal index (1).
  __ push(Immediate(Smi::FromInt(node_->literal_index())));
  // RegExp pattern (2).
  __ push(Immediate(node_->pattern()));
  // RegExp flags (3).
  __ push(Immediate(node_->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
}


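// Deferred slow path for inline allocation: calls the runtime to allocate
// size_ bytes in new space and leaves the result in target_.  Any live
// registers named in the registers_to_save_ bitmask are preserved across
// the runtime call.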
class DeferredAllocateInNewSpace: public DeferredCode {
 public:
  DeferredAllocateInNewSpace(int size,
                             Register target,
                             int registers_to_save = 0)
      : size_(size), target_(target), registers_to_save_(registers_to_save) {
    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
    ASSERT_EQ(0, registers_to_save & target.bit());
    set_comment("[ DeferredAllocateInNewSpace");
  }
  void Generate();

 private:
  int size_;
  Register target_;
  int registers_to_save_;
};


void DeferredAllocateInNewSpace::Generate() {
  // Save the live registers named in the bitmask; the runtime call
  // below may clobber them.
  for (int i = 0; i < kNumRegs; i++) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ push(save_register);
    }
  }
  __ push(Immediate(Smi::FromInt(size_)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  if (!target_.is(eax)) {
    __ mov(target_, eax);
  }
  // Restore the saved registers in reverse order of the pushes.
  for (int i = kNumRegs - 1; i >= 0; i--) {
    if (registers_to_save_ & (1 << i)) {
      Register save_register = { i };
      __ pop(save_register);
    }
  }
}


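// Each evaluation of a regexp literal yields a fresh JSRegExp object.
// The boilerplate (holding the compiled pattern) is cached in the
// function's literals array and materialized through the runtime on
// first use; afterwards every evaluation shallow-copies it, including
// the in-object fields such as the mutable lastIndex.  The copy is
// allocated inline in new space, falling back to
// DeferredAllocateInNewSpace above when inline allocation fails.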
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literals array and check the allocated entry.  Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the RegExp object.  If so,
  // jump to the deferred code passing the literals array.
  DeferredRegExpLiteral* deferred =
      new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
  __ cmp(boilerplate.reg(), Factory::undefined_value());
  deferred->Branch(equal);
  deferred->BindExit();

  // The boilerplate register now contains the materialized RegExp object.
  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;

  DeferredAllocateInNewSpace* allocate_fallback =
      new DeferredAllocateInNewSpace(size, literals.reg());
  frame_->Push(&boilerplate);
  frame_->SpillTop();
  __ AllocateInNewSpace(size,
                        literals.reg(),
                        tmp.reg(),
                        no_reg,
                        allocate_fallback->entry_label(),
                        TAG_OBJECT);
  allocate_fallback->BindExit();
  boilerplate = frame_->Pop();

  // Copy from boilerplate to clone and return clone.
  for (int i = 0; i < size; i += kPointerSize) {
    __ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
    __ mov(FieldOperand(literals.reg(), i), tmp.reg());
  }
  frame_->Push(&literals);
}


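// Object literals are cloned from a boilerplate cached in the function's
// literals array.  Shallow literals (depth 1) can use the cheaper
// %CreateObjectLiteralShallow; nested literals need %CreateObjectLiteral
// so that inner boilerplates are deep-copied as well.  Properties whose
// values are not compile-time constants are then stored into the clone
// one at a time below.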
void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ObjectLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant properties.
  frame_->Push(node->constant_properties());
  // Should the object literal have fast elements?
  frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->Push(&clone);

  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        Handle<Object> key(property->key()->handle());
        if (key->IsSymbol()) {
          // Duplicate the object as the IC receiver.
          frame_->Dup();
          Load(property->value());
          Result ignored =
              frame_->CallStoreIC(Handle<String>::cast(key), false);
          // A test eax instruction following the store IC call would
          // indicate the presence of an inlined version of the
          // store.  Add a nop to indicate that there is no such
          // inlined version.
          __ nop();
          break;
        }
        // Fall through
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}


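// Array literals choose between three cloning strategies: boilerplates
// with copy-on-write elements use FastCloneShallowArrayStub in
// COPY_ON_WRITE_ELEMENTS mode, nested or oversized literals go through
// the runtime, and small flat literals use the stub in CLONE_ELEMENTS
// mode.  Elements that are not compile-time constants are then stored
// into the clone individually, with write barriers.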
void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ArrayLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  frame_->Push(&literals);
  frame_->Push(Smi::FromInt(node->literal_index()));
  frame_->Push(node->constant_elements());
  int length = node->values()->length();
  Result clone;
  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
  } else if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
  }
  frame_->Push(&clone);

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < length; i++) {
    Expression* value = node->values()->at(i);

    if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) {
      continue;
    }

    // The property must be set by generated code.
    Load(value);

    // Get the property value off the stack.
    Result prop_value = frame_->Pop();
    prop_value.ToRegister();

    // Fetch the array literal while leaving a copy on the stack and
    // use it to get the elements array.
    frame_->Dup();
    Result elements = frame_->Pop();
    elements.ToRegister();
    frame_->Spill(elements.reg());
    // Get the elements array.
    __ mov(elements.reg(),
           FieldOperand(elements.reg(), JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());

    // Update the write barrier for the array address.
    frame_->Spill(prop_value.reg());  // Overwritten by the write barrier.
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_valid());
    __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
  }
}


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
  ASSERT(!in_safe_int32_mode());
  ASSERT(!in_spilled_code());
  // Call the runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  Result result =
      frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->Push(&result);
}


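// Assignment to a variable that lives in a slot, e.g. 'x = y' or the
// compound form 'x += y'.  A compound assignment loads the current
// value, loads the right-hand side, combines them with the implied
// binary operation, and only then stores; a simple assignment just
// loads the right-hand side before the store.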
void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->slot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    // Construct the implicit binary operation.
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary.
  // If the receiver is trivial it is not placed on the stack at this point,
  // but loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so the target is never a plain variable and there
    // must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      frame()->Dup();
    }
    Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block.  To prepare
  // for that, add an extra copy of the receiver to the frame, so that it
  // can be converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame()->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else if (var != NULL) {
      // The LoadIC stub expects the object in eax.
      // Freeing eax causes the code generator to load the global into it.
      frame_->Spill(eax);
      LoadGlobal();
    } else {
      frame()->Dup();
    }
    Result value = EmitNamedLoad(name, var != NULL);
    frame()->Push(&value);
    Load(node->value());

    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    // Construct the implicit binary operation.
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment.  It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    Result value = frame()->Pop();
    frame()->Push(prop->obj());
    frame()->Push(&value);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  Result answer = EmitNamedStore(name, is_contextual);
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment.  Swap
      // the receiver and the value of the assignment expression.
      Result result = frame()->Pop();
      Result receiver = frame()->Pop();
      frame()->Push(&result);
      frame()->Push(&receiver);
    }
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT_EQ(frame()->height(), original_height + 1);
}


void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block.  To prepare
  // for that, add an extra copy of the receiver to the frame, so that it
  // can be converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame()->PushElementAt(1);
    frame()->PushElementAt(1);
    Result value = EmitKeyedLoad();
    frame()->Push(&value);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment.  It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  Result answer = EmitKeyedStore(prop->key()->type());
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment.  Swap the receiver and
    // the value of the assignment expression.
    Result result = frame()->Pop();
    Result receiver = frame()->Pop();
    frame()->Push(&result);
    frame()->Push(&receiver);
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitAssignment(Assignment* node) {
  ASSERT(!in_safe_int32_mode());
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references.  We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame()->Push(&result);
  }

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitThrow(Throw* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Throw");
  Load(node->exception());
  Result result = frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->Push(&result);
}


void CodeGenerator::VisitProperty(Property* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Property");
  Reference property(this, node);
  property.GetValue();
}


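// Calls are compiled by case analysis on the callee:
//   1. a name that may resolve to 'eval' (possibly direct eval call),
//   2. a global variable (call IC, global object as receiver),
//   3. a variable resolved through a LOOKUP slot (runtime context
//      lookup, with a fast case for eval-introduced bindings),
//   4. a named property, 'obj.foo(...)' (call IC; 'x.apply(y, arguments)'
//      gets a dedicated lazy-arguments path),
//   5. a keyed property, 'obj[key](...)' (keyed call IC), and
//   6. any other expression (load it and call with the global receiver).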
void CodeGenerator::VisitCall(Call* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated.  The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call.  Then we call the resolved function using the given
    // arguments.

    // Prepare the stack for the call to the resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->Push(Factory::undefined_value());

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Result to hold the result of the function resolution and the
    // final result of the eval call.
    Result result;

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code.  If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->slot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      Result fun = LoadFromGlobalSlotCheckExtensions(var->slot(),
                                                     NOT_INSIDE_TYPEOF,
                                                     &slow);
      frame_->Push(&fun);
      if (arg_count > 0) {
        frame_->PushElementAt(arg_count);
      } else {
        frame_->Push(Factory::undefined_value());
      }
      frame_->PushParameterAt(-1);

      // Resolve the call.
      result =
          frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);

      done.Jump(&result);
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    frame_->PushElementAt(arg_count + 1);
    if (arg_count > 0) {
      frame_->PushElementAt(arg_count);
    } else {
      frame_->Push(Factory::undefined_value());
    }
    frame_->PushParameterAt(-1);

    // Resolve the call.
    result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind(&result);

    // The runtime call returns a pair of values in eax (function) and
    // edx (receiver).  Touch up the stack with the right values.
    Result receiver = allocator_->Allocate(edx);
    frame_->SetElementAt(arg_count + 1, &result);
    frame_->SetElementAt(arg_count, &receiver);
    receiver.Unuse();

    // Call the function.
    CodeForSourcePosition(node->position());
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    result = frame_->CallStub(&call_function, arg_count + 1);

    // Restore the context and overwrite the function on the stack with
    // the result.
    frame_->RestoreContextRegister();
    frame_->SetElementAt(0, &result);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------

    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Push the name of the function onto the frame.
    frame_->Push(var->name());

    // Call the IC initialization code.
    CodeForSourcePosition(node->position());
    Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
                                       arg_count,
                                       loop_nesting());
    frame_->RestoreContextRegister();
    frame_->Push(&result);

  } else if (var != NULL && var->slot() != NULL &&
             var->slot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;
    Result function;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->slot(),
                                    NOT_INSIDE_TYPEOF,
                                    &function,
                                    &slow,
                                    &done);

    slow.Bind();
    // Enter the runtime system to load the function from the context.
    // Sync the frame so we can push the arguments directly into
    // place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // The runtime call returns a pair of values in eax and edx.  The
    // looked-up function is in eax and the receiver is in edx.  These
    // register references are not ref counted here.  We spill them
    // eagerly since they are arguments to an inevitable call (and are
    // not sharable by the arguments).
    ASSERT(!allocator()->is_used(eax));
    frame_->EmitPush(eax);

    // Load the receiver.
    ASSERT(!allocator()->is_used(edx));
    frame_->EmitPush(edx);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind(&function);
      frame_->Push(&function);
      LoadGlobalReceiver();
      call.Bind();
    }

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Push the name of the function onto the frame.
        frame_->Push(name);

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
                               loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the function to call from the property through a reference.

      // Pass receiver to called function.
      if (property->is_synthetic()) {
        Reference ref(this, property);
        ref.GetValue();
        // Use global object as receiver.
        LoadGlobalReceiver();
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Load the name of the function.
        Load(property->key());

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET,
                                    arg_count,
                                    loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver();

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
  }
}


void CodeGenerator::VisitCallNew(CallNew* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.  This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Compute function to call and use the global object as the
  // receiver.  There is no need to use the global proxy here because
  // it will always be replaced with a newly allocated object.
  Load(node->expression());
  LoadGlobal();

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Result result = frame_->CallConstructor(arg_count);
  // Replace the function on the stack with the result.
  frame_->SetElementAt(0, &result);
}


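// On ia32, smis are 31-bit integers shifted left by one with a zero tag
// bit, so 'test reg, kSmiTagMask' sets the zero flag exactly for smis.
// The %_IsSmi intrinsic below therefore compiles to a single test
// instruction feeding the control destination.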
void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  value.Unuse();
  destination()->Split(zero);
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  frame_->Push(Factory::undefined_value());
}


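// A non-negative smi has both the tag bit and the sign bit clear, so
// testing against (kSmiTagMask | kSmiSignMask) checks "is a smi" and
// "is not negative" in one instruction.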
void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
  value.Unuse();
  destination()->Split(zero);
}


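// The string intrinsics below all follow the same deferred-code pattern:
// a StringChar*Generator emits the inline fast case, and the deferred
// Generate() emits the out-of-line slow case (runtime calls, non-flat
// strings, argument conversion), rejoining the main code at the exit
// label.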
class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ Set(result_, Immediate(Factory::undefined_value()));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ Set(result_, Immediate(Factory::nan_value()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};


// This generates code that performs a String.prototype.charCodeAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need two extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch = allocator()->Allocate();
  ASSERT(scratch.is_valid());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object.reg(),
                                   index.reg(),
                                   scratch.reg(),
                                   result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Result code = frame_->Pop();
  code.ToRegister();
  ASSERT(code.is_valid());

  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code.reg(), result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ Set(result_, Immediate(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ Set(result_, Immediate(Factory::empty_string()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need three extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch1 = allocator()->Allocate();
  ASSERT(scratch1.is_valid());
  Result scratch2 = allocator()->Allocate();
  ASSERT(scratch2.is_valid());

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object.reg(),
                               index.reg(),
                               scratch1.reg(),
                               scratch2.reg(),
                               result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get the map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a JS array or not.
  __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get the map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a regexp.
  __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


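// The JS_OBJECT range check in GenerateIsObject uses the standard
// unsigned-compare trick: after subtracting FIRST_JS_OBJECT_TYPE from
// the instance type, a single unsigned below_equal comparison against
// (LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE) tests membership in
// [FIRST_JS_OBJECT_TYPE, LAST_JS_OBJECT_TYPE]; types below the lower
// bound wrap around to large unsigned values and fail the check.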
Steve Blockd0582a62009-12-15 09:54:21 +00006741void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
6742 // This generates a fast version of:
6743 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
6744 ASSERT(args->length() == 1);
6745 Load(args->at(0));
6746 Result obj = frame_->Pop();
6747 obj.ToRegister();
6748
6749 __ test(obj.reg(), Immediate(kSmiTagMask));
6750 destination()->false_target()->Branch(zero);
6751 __ cmp(obj.reg(), Factory::null_value());
6752 destination()->true_target()->Branch(equal);
6753
6754 Result map = allocator()->Allocate();
6755 ASSERT(map.is_valid());
6756 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
6757 // Undetectable objects behave like undefined when tested with typeof.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006758 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
6759 1 << Map::kIsUndetectable);
Steve Blockd0582a62009-12-15 09:54:21 +00006760 destination()->false_target()->Branch(not_zero);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006761 // Do a range test for JSObject type. We can't use
6762 // MacroAssembler::IsInstanceJSObjectType, because we are using a
6763 // ControlDestination, so we copy its implementation here.
Steve Blockd0582a62009-12-15 09:54:21 +00006764 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006765 __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
6766 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00006767 obj.Unuse();
6768 map.Unuse();
Leon Clarkef7060e22010-06-03 12:02:55 +01006769 destination()->Split(below_equal);
Steve Blockd0582a62009-12-15 09:54:21 +00006770}
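
// The range test above relies on a standard trick: after subtracting
// FIRST_JS_OBJECT_TYPE, one unsigned comparison checks both bounds at once,
// because values below the lower bound wrap around to large unsigned
// numbers. A minimal standalone sketch (plain C++, not the V8 API):
//
//   static inline bool InRange(unsigned type, unsigned first, unsigned last) {
//     return (type - first) <= (last - first);  // single unsigned compare
//   }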
6771
6772
Iain Merrick75681382010-08-19 15:07:18 +01006773void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +01006774 // This generates a fast version of:
6775 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
6776// typeof(arg) === 'function').
6777 // It includes undetectable objects (as opposed to IsObject).
6778 ASSERT(args->length() == 1);
6779 Load(args->at(0));
6780 Result value = frame_->Pop();
6781 value.ToRegister();
6782 ASSERT(value.is_valid());
6783 __ test(value.reg(), Immediate(kSmiTagMask));
6784 destination()->false_target()->Branch(equal);
6785
6786 // Check that this is an object.
6787 frame_->Spill(value.reg());
6788 __ CmpObjectType(value.reg(), FIRST_JS_OBJECT_TYPE, value.reg());
6789 value.Unuse();
6790 destination()->Split(above_equal);
6791}
6792
6793
Iain Merrick75681382010-08-19 15:07:18 +01006794// Deferred code to check whether a String wrapper object is safe to use with
6795// the default valueOf behavior. This code is called after the bit caching this
6796// information in the map has been checked against the map of the object in the
6797// map_result_ register. On return the register map_result_ contains 1 for true
6798// and 0 for false.
6799class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
6800 public:
6801 DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
6802 Register map_result,
6803 Register scratch1,
6804 Register scratch2)
6805 : object_(object),
6806 map_result_(map_result),
6807 scratch1_(scratch1),
6808 scratch2_(scratch2) { }
6809
6810 virtual void Generate() {
6811 Label false_result;
6812
6813 // Check that map is loaded as expected.
6814 if (FLAG_debug_code) {
6815 __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
6816 __ Assert(equal, "Map not in expected register");
6817 }
6818
6819 // Check for fast case object. Generate false result for slow case object.
6820 __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
6821 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
6822 __ cmp(scratch1_, Factory::hash_table_map());
6823 __ j(equal, &false_result);
6824
6825 // Look for valueOf symbol in the descriptor array, and indicate false if
6826 // found. The type is not checked, so if it is a transition it is a false
6827 // negative.
6828 __ mov(map_result_,
6829 FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
6830 __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
6831 // map_result_: descriptor array
6832 // scratch1_: length of descriptor array
6833 // Calculate the end of the descriptor array.
6834 STATIC_ASSERT(kSmiTag == 0);
6835 STATIC_ASSERT(kSmiTagSize == 1);
6836 STATIC_ASSERT(kPointerSize == 4);
6837 __ lea(scratch1_,
6838 Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
6839 // Calculate location of the first key name.
6840 __ add(Operand(map_result_),
6841 Immediate(FixedArray::kHeaderSize +
6842 DescriptorArray::kFirstIndex * kPointerSize));
6843 // Loop through all the keys in the descriptor array. If one of these is the
6844 // symbol valueOf, the result is false.
6845 Label entry, loop;
6846 __ jmp(&entry);
6847 __ bind(&loop);
6848 __ mov(scratch2_, FieldOperand(map_result_, 0));
6849 __ cmp(scratch2_, Factory::value_of_symbol());
6850 __ j(equal, &false_result);
6851 __ add(Operand(map_result_), Immediate(kPointerSize));
6852 __ bind(&entry);
6853 __ cmp(map_result_, Operand(scratch1_));
6854 __ j(not_equal, &loop);
6855
6856 // Reload map as register map_result_ was used as temporary above.
6857 __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
6858
6859 // If a valueOf property is not found on the object, check that its
6860 // prototype is the unmodified String prototype. If not, the result is false.
6861 __ mov(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
6862 __ test(scratch1_, Immediate(kSmiTagMask));
6863 __ j(zero, &false_result);
6864 __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
6865 __ mov(scratch2_, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
6866 __ mov(scratch2_,
6867 FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
6868 __ cmp(scratch1_,
6869 CodeGenerator::ContextOperand(
6870 scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
6871 __ j(not_equal, &false_result);
6872 // Set the bit in the map to indicate that it has been checked safe for
6873 // default valueOf and set true result.
6874 __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
6875 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
6876 __ Set(map_result_, Immediate(1));
6877 __ jmp(exit_label());
6878 __ bind(&false_result);
6879 // Set false result.
6880 __ Set(map_result_, Immediate(0));
6881 }
6882
6883 private:
6884 Register object_;
6885 Register map_result_;
6886 Register scratch1_;
6887 Register scratch2_;
6888};
6889
6890
6891void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
6892 ZoneList<Expression*>* args) {
6893 ASSERT(args->length() == 1);
6894 Load(args->at(0));
6895 Result obj = frame_->Pop(); // Pop the string wrapper.
6896 obj.ToRegister();
6897 ASSERT(obj.is_valid());
6898 if (FLAG_debug_code) {
6899 __ AbortIfSmi(obj.reg());
6900 }
6901
6902 // Check whether this map has already been checked to be safe for default
6903 // valueOf.
6904 Result map_result = allocator()->Allocate();
6905 ASSERT(map_result.is_valid());
6906 __ mov(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
6907 __ test_b(FieldOperand(map_result.reg(), Map::kBitField2Offset),
6908 1 << Map::kStringWrapperSafeForDefaultValueOf);
6909 destination()->true_target()->Branch(not_zero);
6910
6911 // We need an additional two scratch registers for the deferred code.
6912 Result temp1 = allocator()->Allocate();
6913 ASSERT(temp1.is_valid());
6914 Result temp2 = allocator()->Allocate();
6915 ASSERT(temp2.is_valid());
6916
6917 DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
6918 new DeferredIsStringWrapperSafeForDefaultValueOf(
6919 obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
6920 deferred->Branch(zero);
6921 deferred->BindExit();
6922 __ test(map_result.reg(), Operand(map_result.reg()));
6923 obj.Unuse();
6924 map_result.Unuse();
6925 temp1.Unuse();
6926 temp2.Unuse();
6927 destination()->Split(not_equal);
6928}
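
// The deferred path above is a linear scan of the map's descriptor array
// followed by a prototype identity check. Simplified sketch of the logic
// (illustrative only; these are not the real V8 accessors):
//
//   bool IsSafeForDefaultValueOf(Map* map) {
//     for (Descriptor d : map->descriptors()) {
//       // A transition named valueOf gives a false negative; that is safe.
//       if (d.key() == "valueOf") return false;
//     }
//     return map->prototype() == UnmodifiedStringPrototype();
//   }
//
// A positive answer is cached in Map::kStringWrapperSafeForDefaultValueOf,
// so later calls take the fast path at the top of this function.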
6929
6930
Steve Blockd0582a62009-12-15 09:54:21 +00006931void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
6932 // This generates a fast version of:
6933 // (%_ClassOf(arg) === 'Function')
6934 ASSERT(args->length() == 1);
6935 Load(args->at(0));
6936 Result obj = frame_->Pop();
6937 obj.ToRegister();
6938 __ test(obj.reg(), Immediate(kSmiTagMask));
6939 destination()->false_target()->Branch(zero);
6940 Result temp = allocator()->Allocate();
6941 ASSERT(temp.is_valid());
6942 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
6943 obj.Unuse();
6944 temp.Unuse();
6945 destination()->Split(equal);
6946}
6947
6948
Leon Clarked91b9f72010-01-27 17:25:45 +00006949void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
6950 ASSERT(args->length() == 1);
6951 Load(args->at(0));
6952 Result obj = frame_->Pop();
6953 obj.ToRegister();
6954 __ test(obj.reg(), Immediate(kSmiTagMask));
6955 destination()->false_target()->Branch(zero);
6956 Result temp = allocator()->Allocate();
6957 ASSERT(temp.is_valid());
6958 __ mov(temp.reg(),
6959 FieldOperand(obj.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006960 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
6961 1 << Map::kIsUndetectable);
Leon Clarked91b9f72010-01-27 17:25:45 +00006962 obj.Unuse();
6963 temp.Unuse();
6964 destination()->Split(not_zero);
6965}
6966
6967
Steve Blocka7e24c12009-10-30 11:49:00 +00006968void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
6969 ASSERT(args->length() == 0);
6970
6971 // Get the frame pointer for the calling frame.
6972 Result fp = allocator()->Allocate();
6973 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
6974
6975 // Skip the arguments adaptor frame if it exists.
6976 Label check_frame_marker;
6977 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
6978 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
6979 __ j(not_equal, &check_frame_marker);
6980 __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
6981
6982 // Check the marker in the calling frame.
6983 __ bind(&check_frame_marker);
6984 __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
6985 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
6986 fp.Unuse();
6987 destination()->Split(equal);
6988}
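
// The frame walk above, pictured as a sketch (illustrative only, not the
// actual V8 frame types): read the caller's frame pointer, skip one
// arguments adaptor frame if present, then compare the frame marker.
//
//   Address fp = Memory::Address_at(ebp + kCallerFPOffset);
//   if (Memory::Object_at(fp + kContextOffset) == ADAPTOR_SENTINEL) {
//     fp = Memory::Address_at(fp + kCallerFPOffset);  // skip adaptor frame
//   }
//   bool is_construct =
//       Memory::Object_at(fp + kMarkerOffset) == CONSTRUCT_MARKER;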
6989
6990
6991void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
6992 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01006993
6994 Result fp = allocator_->Allocate();
6995 Result result = allocator_->Allocate();
6996 ASSERT(fp.is_valid() && result.is_valid());
6997
6998 Label exit;
6999
7000 // Get the number of formal parameters.
7001 __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));
7002
7003 // Check if the calling frame is an arguments adaptor frame.
7004 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
7005 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
7006 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
7007 __ j(not_equal, &exit);
7008
7009 // Arguments adaptor case: Read the arguments length from the
7010 // adaptor frame.
7011 __ mov(result.reg(),
7012 Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));
7013
7014 __ bind(&exit);
7015 result.set_type_info(TypeInfo::Smi());
7016 if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00007017 frame_->Push(&result);
7018}
7019
7020
7021void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
7022 ASSERT(args->length() == 1);
7023 JumpTarget leave, null, function, non_function_constructor;
7024 Load(args->at(0)); // Load the object.
7025 Result obj = frame_->Pop();
7026 obj.ToRegister();
7027 frame_->Spill(obj.reg());
7028
7029 // If the object is a smi, we return null.
7030 __ test(obj.reg(), Immediate(kSmiTagMask));
7031 null.Branch(zero);
7032
7033 // Check that the object is a JS object but take special care of JS
7034 // functions to make sure they have 'Function' as their class.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007035 __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
7036 null.Branch(below);
Steve Blocka7e24c12009-10-30 11:49:00 +00007037
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007038 // As long as JS_FUNCTION_TYPE is the last instance type and it is
7039 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
7040 // LAST_JS_OBJECT_TYPE.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007041 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
7042 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007043 __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
7044 function.Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00007045
7046 // Check if the constructor in the map is a function.
7047 { Result tmp = allocator()->Allocate();
7048 __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
7049 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
7050 non_function_constructor.Branch(not_equal);
7051 }
7052
7053 // The map register now contains the constructor function. Grab the
7054 // instance class name from there.
7055 __ mov(obj.reg(),
7056 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
7057 __ mov(obj.reg(),
7058 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
7059 frame_->Push(&obj);
7060 leave.Jump();
7061
7062 // Functions have class 'Function'.
7063 function.Bind();
7064 frame_->Push(Factory::function_class_symbol());
7065 leave.Jump();
7066
7067 // Objects with a non-function constructor have class 'Object'.
7068 non_function_constructor.Bind();
7069 frame_->Push(Factory::Object_symbol());
7070 leave.Jump();
7071
7072 // Non-JS objects have class null.
7073 null.Bind();
7074 frame_->Push(Factory::null_value());
7075
7076 // All done.
7077 leave.Bind();
7078}
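
// The classification above mirrors the %_ClassOf semantics. As a compact
// decision sketch (illustrative pseudo-C++, not the real API):
//
//   Object* ClassOf(Object* obj) {
//     if (obj->IsSmi() || !obj->IsJSObject()) return null_value();
//     if (obj->IsJSFunction()) return "Function";
//     Object* ctor = obj->map()->constructor();
//     if (!ctor->IsJSFunction()) return "Object";
//     return JSFunction::cast(ctor)->shared()->instance_class_name();
//   }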
7079
7080
7081void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
7082 ASSERT(args->length() == 1);
7083 JumpTarget leave;
7084 Load(args->at(0)); // Load the object.
7085 frame_->Dup();
7086 Result object = frame_->Pop();
7087 object.ToRegister();
7088 ASSERT(object.is_valid());
7089 // if (object->IsSmi()) return object.
7090 __ test(object.reg(), Immediate(kSmiTagMask));
7091 leave.Branch(zero, taken);
7092 // It is a heap object - get map.
7093 Result temp = allocator()->Allocate();
7094 ASSERT(temp.is_valid());
7095 // if (!object->IsJSValue()) return object.
7096 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
7097 leave.Branch(not_equal, not_taken);
7098 __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
7099 object.Unuse();
7100 frame_->SetElementAt(0, &temp);
7101 leave.Bind();
7102}
7103
7104
7105void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
7106 ASSERT(args->length() == 2);
7107 JumpTarget leave;
7108 Load(args->at(0)); // Load the object.
7109 Load(args->at(1)); // Load the value.
7110 Result value = frame_->Pop();
7111 Result object = frame_->Pop();
7112 value.ToRegister();
7113 object.ToRegister();
7114
7115 // if (object->IsSmi()) return value.
7116 __ test(object.reg(), Immediate(kSmiTagMask));
7117 leave.Branch(zero, &value, taken);
7118
7119 // It is a heap object - get its map.
7120 Result scratch = allocator_->Allocate();
7121 ASSERT(scratch.is_valid());
7122 // if (!object->IsJSValue()) return value.
7123 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
7124 leave.Branch(not_equal, &value, not_taken);
7125
7126 // Store the value.
7127 __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
7128 // Update the write barrier. Save the value as it will be
7129 // overwritten by the write barrier code and is needed afterward.
7130 Result duplicate_value = allocator_->Allocate();
7131 ASSERT(duplicate_value.is_valid());
7132 __ mov(duplicate_value.reg(), value.reg());
7133 // The object register is also overwritten by the write barrier and
7134 // possibly aliased in the frame.
7135 frame_->Spill(object.reg());
7136 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
7137 scratch.reg());
7138 object.Unuse();
7139 scratch.Unuse();
7140 duplicate_value.Unuse();
7141
7142 // Leave.
7143 leave.Bind(&value);
7144 frame_->Push(&value);
7145}
7146
7147
Steve Block6ded16b2010-05-10 14:33:55 +01007148void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007149 ASSERT(args->length() == 1);
7150
7151 // ArgumentsAccessStub expects the key in edx and the formal
7152 // parameter count in eax.
7153 Load(args->at(0));
7154 Result key = frame_->Pop();
7155 // Explicitly create a constant result.
Andrei Popescu31002712010-02-23 13:46:05 +00007156 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00007157 // Call the shared stub to get to arguments[key].
7158 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
7159 Result result = frame_->CallStub(&stub, &key, &count);
7160 frame_->Push(&result);
7161}
7162
7163
7164void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
7165 ASSERT(args->length() == 2);
7166
7167 // Load the two objects into registers and perform the comparison.
7168 Load(args->at(0));
7169 Load(args->at(1));
7170 Result right = frame_->Pop();
7171 Result left = frame_->Pop();
7172 right.ToRegister();
7173 left.ToRegister();
7174 __ cmp(right.reg(), Operand(left.reg()));
7175 right.Unuse();
7176 left.Unuse();
7177 destination()->Split(equal);
7178}
7179
7180
7181void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
7182 ASSERT(args->length() == 0);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007183 STATIC_ASSERT(kSmiTag == 0); // EBP value is aligned, so it looks like a Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00007184 Result ebp_as_smi = allocator_->Allocate();
7185 ASSERT(ebp_as_smi.is_valid());
7186 __ mov(ebp_as_smi.reg(), Operand(ebp));
7187 frame_->Push(&ebp_as_smi);
7188}
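
// Why ebp "looks like" a smi: smis use kSmiTag == 0 in the least significant
// bit, so any value whose low bit is clear is already a valid smi, and frame
// pointers are at least 4-byte aligned. A minimal sketch of ia32 smi tagging
// (assuming 32-bit two's complement, kSmiTagSize == 1):
//
//   static inline int32_t SmiTag(int32_t value) { return value << 1; }
//   static inline int32_t SmiUntag(int32_t smi) { return smi >> 1; }
//   // An aligned pointer p has (p & 1) == 0, i.e. it already carries the
//   // smi tag, which is why the frame pointer can be pushed unchanged.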
7189
7190
Steve Block6ded16b2010-05-10 14:33:55 +01007191void CodeGenerator::GenerateRandomHeapNumber(
7192 ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007193 ASSERT(args->length() == 0);
7194 frame_->SpillAll();
7195
Steve Block6ded16b2010-05-10 14:33:55 +01007196 Label slow_allocate_heapnumber;
7197 Label heapnumber_allocated;
Steve Blocka7e24c12009-10-30 11:49:00 +00007198
Steve Block6ded16b2010-05-10 14:33:55 +01007199 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
7200 __ jmp(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00007201
Steve Block6ded16b2010-05-10 14:33:55 +01007202 __ bind(&slow_allocate_heapnumber);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01007203 // Allocate a heap number.
7204 __ CallRuntime(Runtime::kNumberAlloc, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01007205 __ mov(edi, eax);
7206
7207 __ bind(&heapnumber_allocated);
7208
7209 __ PrepareCallCFunction(0, ebx);
7210 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
7211
7212 // Convert 32 random bits in eax to 0.(32 random bits) in a double
7213 // by computing:
7214 // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
7215 // This is implemented on both SSE2 and FPU.
7216 if (CpuFeatures::IsSupported(SSE2)) {
7217 CpuFeatures::Scope fscope(SSE2);
7218 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
7219 __ movd(xmm1, Operand(ebx));
7220 __ movd(xmm0, Operand(eax));
7221 __ cvtss2sd(xmm1, xmm1);
7222 __ pxor(xmm0, xmm1);
7223 __ subsd(xmm0, xmm1);
7224 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
7225 } else {
7226 // 0x4130000000000000 is 1.0 x 2^20 as a double.
7227 __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
7228 Immediate(0x41300000));
7229 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
7230 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
7231 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
7232 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
7233 __ fsubp(1);
7234 __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00007235 }
Steve Block6ded16b2010-05-10 14:33:55 +01007236 __ mov(eax, edi);
Steve Blocka7e24c12009-10-30 11:49:00 +00007237
7238 Result result = allocator_->Allocate(eax);
7239 frame_->Push(&result);
7240}
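
// The double-conversion trick above, written out as ordinary C++ for
// clarity (a sketch of the idea, not the code the assembler emits):
//
//   #include <cstdint>
//   #include <cstring>
//   double RandomBitsToDouble(uint32_t bits) {
//     // Build 1.(20 zeros)(32 random bits) x 2^20 directly from bits:
//     // the high word 0x41300000 encodes sign 0, exponent 2^20, and the
//     // top 20 mantissa bits as zero; 'bits' fills the low mantissa word.
//     uint64_t repr = (uint64_t{0x41300000} << 32) | bits;
//     double d;
//     std::memcpy(&d, &repr, sizeof(d));
//     return d - 1048576.0;  // subtract 1.0 x 2^20, leaving 0.(32 bits)
//   }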
7241
7242
Steve Blockd0582a62009-12-15 09:54:21 +00007243void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
7244 ASSERT_EQ(2, args->length());
7245
7246 Load(args->at(0));
7247 Load(args->at(1));
7248
7249 StringAddStub stub(NO_STRING_ADD_FLAGS);
7250 Result answer = frame_->CallStub(&stub, 2);
7251 frame_->Push(&answer);
7252}
7253
7254
Leon Clarkee46be812010-01-19 14:06:41 +00007255void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
7256 ASSERT_EQ(3, args->length());
7257
7258 Load(args->at(0));
7259 Load(args->at(1));
7260 Load(args->at(2));
7261
7262 SubStringStub stub;
7263 Result answer = frame_->CallStub(&stub, 3);
7264 frame_->Push(&answer);
7265}
7266
7267
7268void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
7269 ASSERT_EQ(2, args->length());
7270
7271 Load(args->at(0));
7272 Load(args->at(1));
7273
7274 StringCompareStub stub;
7275 Result answer = frame_->CallStub(&stub, 2);
7276 frame_->Push(&answer);
7277}
7278
7279
7280void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01007281 ASSERT_EQ(4, args->length());
Leon Clarkee46be812010-01-19 14:06:41 +00007282
7283 // Load the arguments on the stack and call the stub.
7284 Load(args->at(0));
7285 Load(args->at(1));
7286 Load(args->at(2));
7287 Load(args->at(3));
7288 RegExpExecStub stub;
7289 Result result = frame_->CallStub(&stub, 4);
7290 frame_->Push(&result);
7291}
7292
7293
Steve Block6ded16b2010-05-10 14:33:55 +01007294void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
7295 // No stub. This code only occurs a few times in regexp.js.
7296 const int kMaxInlineLength = 100;
7297 ASSERT_EQ(3, args->length());
7298 Load(args->at(0)); // Size of array, smi.
7299 Load(args->at(1)); // "index" property value.
7300 Load(args->at(2)); // "input" property value.
7301 {
7302 VirtualFrame::SpilledScope spilled_scope;
7303
7304 Label slowcase;
7305 Label done;
7306 __ mov(ebx, Operand(esp, kPointerSize * 2));
7307 __ test(ebx, Immediate(kSmiTagMask));
7308 __ j(not_zero, &slowcase);
7309 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
7310 __ j(above, &slowcase);
7311 // Smi-tagging is equivalent to multiplying by 2.
7312 STATIC_ASSERT(kSmiTag == 0);
7313 STATIC_ASSERT(kSmiTagSize == 1);
7314 // Allocate RegExpResult followed by FixedArray with size in ebx.
7315 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
7316 // Elements: [Map][Length][..elements..]
7317 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
7318 times_half_pointer_size,
7319 ebx, // In: Number of elements (times 2, being a smi)
7320 eax, // Out: Start of allocation (tagged).
7321 ecx, // Out: End of allocation.
7322 edx, // Scratch register
7323 &slowcase,
7324 TAG_OBJECT);
7325 // eax: Start of allocated area, object-tagged.
7326
7327 // Set JSArray map to global.regexp_result_map().
7328 // Set empty properties FixedArray.
7329 // Set elements to point to FixedArray allocated right after the JSArray.
7330 // Interleave operations for better latency.
7331 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
7332 __ mov(ecx, Immediate(Factory::empty_fixed_array()));
7333 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
7334 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
7335 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
7336 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
7337 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
7338 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
7339
7340 // Set input, index and length fields from arguments.
7341 __ pop(FieldOperand(eax, JSRegExpResult::kInputOffset));
7342 __ pop(FieldOperand(eax, JSRegExpResult::kIndexOffset));
7343 __ pop(ecx);
7344 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
7345
7346 // Fill out the elements FixedArray.
7347 // eax: JSArray.
7348 // ebx: FixedArray.
7349 // ecx: Number of elements in array, as smi.
7350
7351 // Set map.
7352 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
7353 Immediate(Factory::fixed_array_map()));
7354 // Set length.
Steve Block6ded16b2010-05-10 14:33:55 +01007355 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
7356 // Fill the contents of the fixed array with the hole value.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007357 __ SmiUntag(ecx);
Steve Block6ded16b2010-05-10 14:33:55 +01007358 __ mov(edx, Immediate(Factory::the_hole_value()));
7359 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
7360 // Fill fixed array elements with hole.
7361 // eax: JSArray.
7362 // ecx: Number of elements to fill.
7363 // ebx: Start of elements in FixedArray.
7364 // edx: the hole.
7365 Label loop;
7366 __ test(ecx, Operand(ecx));
7367 __ bind(&loop);
7368 __ j(less_equal, &done); // Jump if ecx is negative or zero.
7369 __ sub(Operand(ecx), Immediate(1));
7370 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
7371 __ jmp(&loop);
7372
7373 __ bind(&slowcase);
7374 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
7375
7376 __ bind(&done);
7377 }
7378 frame_->Forget(3);
7379 frame_->Push(eax);
7380}
7381
7382
Steve Block791712a2010-08-27 10:21:07 +01007383void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) {
7384 ASSERT_EQ(1, args->length());
7385
7386 Load(args->at(0));
7387 Result object_result = frame_->Pop();
7388 object_result.ToRegister(eax);
7389 object_result.Unuse();
7390 {
7391 VirtualFrame::SpilledScope spilled_scope;
7392
7393 Label done;
7394
7395 __ test(eax, Immediate(kSmiTagMask));
7396 __ j(zero, &done);
7397
7398 // Load JSRegExpResult map into edx.
7399 // Arguments to this function should be results of calling RegExp exec,
7400 // which returns either an unmodified JSRegExpResult or null. Anything not
7401 // having the unmodified JSRegExpResult map is returned unmodified.
7402 // This also ensures that elements are fast.
7403 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
7404 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
7405 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
7406 __ cmp(edx, FieldOperand(eax, HeapObject::kMapOffset));
7407 __ j(not_equal, &done);
7408
7409 if (FLAG_debug_code) {
7410 // Check that object really has empty properties array, as the map
7411 // should guarantee.
7412 __ cmp(FieldOperand(eax, JSObject::kPropertiesOffset),
7413 Immediate(Factory::empty_fixed_array()));
7414 __ Check(equal, "JSRegExpResult: default map but non-empty properties.");
7415 }
7416
7417 DeferredAllocateInNewSpace* allocate_fallback =
7418 new DeferredAllocateInNewSpace(JSRegExpResult::kSize,
7419 ebx,
7420 edx.bit() | eax.bit());
7421
7422 // All set, copy the contents to a new object.
7423 __ AllocateInNewSpace(JSRegExpResult::kSize,
7424 ebx,
7425 ecx,
7426 no_reg,
7427 allocate_fallback->entry_label(),
7428 TAG_OBJECT);
7429 __ bind(allocate_fallback->exit_label());
7430
7431 // Copy all fields from eax to ebx.
7432 STATIC_ASSERT(JSRegExpResult::kSize % (2 * kPointerSize) == 0);
7433 // There is an even number of fields, so unroll the loop once
7434 // for efficiency.
7435 for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) {
7436 STATIC_ASSERT(JSObject::kMapOffset % (2 * kPointerSize) == 0);
7437 if (i != JSObject::kMapOffset) {
7438 // The map was already loaded into edx.
7439 __ mov(edx, FieldOperand(eax, i));
7440 }
7441 __ mov(ecx, FieldOperand(eax, i + kPointerSize));
7442
7443 STATIC_ASSERT(JSObject::kElementsOffset % (2 * kPointerSize) == 0);
7444 if (i == JSObject::kElementsOffset) {
7445 // If the elements array isn't empty, make it copy-on-write
7446 // before copying it.
7447 Label empty;
7448 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
7449 __ j(equal, &empty);
7450 ASSERT(!Heap::InNewSpace(Heap::fixed_cow_array_map()));
7451 __ mov(FieldOperand(edx, HeapObject::kMapOffset),
7452 Immediate(Factory::fixed_cow_array_map()));
7453 __ bind(&empty);
7454 }
7455 __ mov(FieldOperand(ebx, i), edx);
7456 __ mov(FieldOperand(ebx, i + kPointerSize), ecx);
7457 }
7458 __ mov(eax, ebx);
7459
7460 __ bind(&done);
7461 }
7462 frame_->Push(eax);
7463}
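
// Shape of the copy loop above, as an illustrative sketch (hypothetical
// names, not the real API): two pointer-sized fields are copied per
// iteration, and a non-empty elements array is shared copy-on-write
// instead of being cloned.
//
//   for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) {
//     Object* a = src[i];                    // map is already cached
//     Object* b = src[i + kPointerSize];
//     if (i == kElementsOffset) MarkCopyOnWrite(a);
//     dst[i] = a;
//     dst[i + kPointerSize] = b;
//   }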
7464
7465
Steve Block6ded16b2010-05-10 14:33:55 +01007466class DeferredSearchCache: public DeferredCode {
7467 public:
7468 DeferredSearchCache(Register dst, Register cache, Register key)
7469 : dst_(dst), cache_(cache), key_(key) {
7470 set_comment("[ DeferredSearchCache");
7471 }
7472
7473 virtual void Generate();
7474
7475 private:
Kristian Monsen25f61362010-05-21 11:50:48 +01007476 Register dst_; // On invocation holds the Smi index of the finger;
7477 // on exit holds the value that was looked up.
7478 Register cache_; // instance of JSFunctionResultCache.
7479 Register key_; // key being looked up.
Steve Block6ded16b2010-05-10 14:33:55 +01007480};
7481
7482
7483void DeferredSearchCache::Generate() {
Kristian Monsen25f61362010-05-21 11:50:48 +01007484 Label first_loop, search_further, second_loop, cache_miss;
7485
7486 // Smi-tagging is equivalent to multiplying by 2.
7487 STATIC_ASSERT(kSmiTag == 0);
7488 STATIC_ASSERT(kSmiTagSize == 1);
7489
7490 Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
7491 Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);
7492
7493 // Check the cache from finger to start of the cache.
7494 __ bind(&first_loop);
7495 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
7496 __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
7497 __ j(less, &search_further);
7498
7499 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
7500 __ j(not_equal, &first_loop);
7501
7502 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
7503 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
7504 __ jmp(exit_label());
7505
7506 __ bind(&search_further);
7507
7508 // Check the cache from end of cache up to finger.
7509 __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));
7510
7511 __ bind(&second_loop);
7512 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
7513 // Consider prefetching into some reg.
7514 __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
7515 __ j(less_equal, &cache_miss);
7516
7517 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
7518 __ j(not_equal, &second_loop);
7519
7520 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
7521 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
7522 __ jmp(exit_label());
7523
7524 __ bind(&cache_miss);
7525 __ push(cache_); // store a reference to cache
7526 __ push(key_); // store a key
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007527 __ push(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
Steve Block6ded16b2010-05-10 14:33:55 +01007528 __ push(key_);
Kristian Monsen25f61362010-05-21 11:50:48 +01007529 // On ia32 function must be in edi.
7530 __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
7531 ParameterCount expected(1);
7532 __ InvokeFunction(edi, expected, CALL_FUNCTION);
7533
7534 // Find a place to put new cached value into.
7535 Label add_new_entry, update_cache;
7536 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache
7537 // Possible optimization: the cache size is constant for a given cache,
7538 // so technically we could use a constant here. However, if we have a
7539 // cache miss, this optimization would hardly matter.
7540
7541 // Check if we could add new entry to cache.
7542 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
Kristian Monsen25f61362010-05-21 11:50:48 +01007543 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
7544 __ j(greater, &add_new_entry);
7545
7546 // Check if we could evict entry after finger.
7547 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
7548 __ add(Operand(edx), Immediate(kEntrySizeSmi));
7549 __ cmp(ebx, Operand(edx));
7550 __ j(greater, &update_cache);
7551
7552 // Need to wrap over the cache.
7553 __ mov(edx, Immediate(kEntriesIndexSmi));
7554 __ jmp(&update_cache);
7555
7556 __ bind(&add_new_entry);
7557 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
7558 __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
7559 __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);
7560
7561 // Update the cache itself.
7562 // edx holds the index.
7563 __ bind(&update_cache);
7564 __ pop(ebx); // restore the key
7565 __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
7566 // Store key.
7567 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
7568 __ RecordWrite(ecx, 0, ebx, edx);
7569
7570 // Store value.
7571 __ pop(ecx); // restore the cache.
7572 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
7573 __ add(Operand(edx), Immediate(Smi::FromInt(1)));
7574 __ mov(ebx, eax);
7575 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
7576 __ RecordWrite(ecx, 0, ebx, edx);
7577
Steve Block6ded16b2010-05-10 14:33:55 +01007578 if (!dst_.is(eax)) {
7579 __ mov(dst_, eax);
7580 }
7581}
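
// The lookup strategy above, summarized as a sketch (indices shown untagged
// for readability; the real code keeps them as smis):
//
//   Object* Search(Cache* cache, Object* key) {
//     // Phase 1: from just below the finger back to the first entry.
//     for (int i = cache->finger() - kEntrySize; i >= kEntriesIndex;
//          i -= kEntrySize) {
//       if (cache->get(i) == key) {
//         cache->set_finger(i);
//         return cache->get(i + 1);
//       }
//     }
//     // Phase 2: from the end of the cache back down to the finger.
//     for (int i = cache->size() - kEntrySize; i > cache->finger();
//          i -= kEntrySize) {
//       if (cache->get(i) == key) {
//         cache->set_finger(i);
//         return cache->get(i + 1);
//       }
//     }
//     // Cache miss: call the factory function, then insert the new pair,
//     // evicting the entry after the finger if the cache is full.
//     return CallFactoryAndInsert(cache, key);
//   }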
7582
7583
7584void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
7585 ASSERT_EQ(2, args->length());
7586
7587 ASSERT_NE(NULL, args->at(0)->AsLiteral());
7588 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
7589
7590 Handle<FixedArray> jsfunction_result_caches(
7591 Top::global_context()->jsfunction_result_caches());
7592 if (jsfunction_result_caches->length() <= cache_id) {
7593 __ Abort("Attempt to use undefined cache.");
7594 frame_->Push(Factory::undefined_value());
7595 return;
7596 }
7597
7598 Load(args->at(1));
7599 Result key = frame_->Pop();
7600 key.ToRegister();
7601
7602 Result cache = allocator()->Allocate();
7603 ASSERT(cache.is_valid());
7604 __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
7605 __ mov(cache.reg(),
7606 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
7607 __ mov(cache.reg(),
7608 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
7609 __ mov(cache.reg(),
7610 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));
7611
7612 Result tmp = allocator()->Allocate();
7613 ASSERT(tmp.is_valid());
7614
7615 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
7616 cache.reg(),
7617 key.reg());
7618
Steve Block6ded16b2010-05-10 14:33:55 +01007619 // tmp.reg() now holds finger offset as a smi.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01007620 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01007621 __ mov(tmp.reg(), FieldOperand(cache.reg(),
7622 JSFunctionResultCache::kFingerOffset));
7623 __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01007624 deferred->Branch(not_equal);
7625
Kristian Monsen25f61362010-05-21 11:50:48 +01007626 __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));
Steve Block6ded16b2010-05-10 14:33:55 +01007627
7628 deferred->BindExit();
7629 frame_->Push(&tmp);
7630}
7631
7632
Andrei Popescu402d9372010-02-26 13:31:12 +00007633void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
7634 ASSERT_EQ(args->length(), 1);
7635
7636 // Load the argument on the stack and call the stub.
7637 Load(args->at(0));
7638 NumberToStringStub stub;
7639 Result result = frame_->CallStub(&stub, 1);
7640 frame_->Push(&result);
7641}
7642
7643
Steve Block6ded16b2010-05-10 14:33:55 +01007644class DeferredSwapElements: public DeferredCode {
7645 public:
7646 DeferredSwapElements(Register object, Register index1, Register index2)
7647 : object_(object), index1_(index1), index2_(index2) {
7648 set_comment("[ DeferredSwapElements");
7649 }
7650
7651 virtual void Generate();
7652
7653 private:
7654 Register object_, index1_, index2_;
7655};
7656
7657
7658void DeferredSwapElements::Generate() {
7659 __ push(object_);
7660 __ push(index1_);
7661 __ push(index2_);
7662 __ CallRuntime(Runtime::kSwapElements, 3);
7663}
7664
7665
7666void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
7667 // Note: this code assumes that the indices passed are within the
7668 // elements' bounds and refer to valid (non-hole) values.
7669 Comment cmnt(masm_, "[ GenerateSwapElements");
7670
7671 ASSERT_EQ(3, args->length());
7672
7673 Load(args->at(0));
7674 Load(args->at(1));
7675 Load(args->at(2));
7676
7677 Result index2 = frame_->Pop();
7678 index2.ToRegister();
7679
7680 Result index1 = frame_->Pop();
7681 index1.ToRegister();
7682
7683 Result object = frame_->Pop();
7684 object.ToRegister();
7685
7686 Result tmp1 = allocator()->Allocate();
7687 tmp1.ToRegister();
7688 Result tmp2 = allocator()->Allocate();
7689 tmp2.ToRegister();
7690
7691 frame_->Spill(object.reg());
7692 frame_->Spill(index1.reg());
7693 frame_->Spill(index2.reg());
7694
7695 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
7696 index1.reg(),
7697 index2.reg());
7698
7699 // Fetch the map and check if array is in fast case.
7700 // Check that object doesn't require security checks and
7701 // has no indexed interceptor.
7702 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
Leon Clarkef7060e22010-06-03 12:02:55 +01007703 deferred->Branch(below);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007704 __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
7705 KeyedLoadIC::kSlowCaseBitFieldMask);
Steve Block6ded16b2010-05-10 14:33:55 +01007706 deferred->Branch(not_zero);
7707
Iain Merrick75681382010-08-19 15:07:18 +01007708 // Check the object's elements are in fast case and writable.
Steve Block6ded16b2010-05-10 14:33:55 +01007709 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
7710 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
7711 Immediate(Factory::fixed_array_map()));
7712 deferred->Branch(not_equal);
7713
7714 // Smi-tagging is equivalent to multiplying by 2.
7715 STATIC_ASSERT(kSmiTag == 0);
7716 STATIC_ASSERT(kSmiTagSize == 1);
7717
7718 // Check that both indices are smis.
7719 __ mov(tmp2.reg(), index1.reg());
7720 __ or_(tmp2.reg(), Operand(index2.reg()));
7721 __ test(tmp2.reg(), Immediate(kSmiTagMask));
7722 deferred->Branch(not_zero);
7723
7724 // Bring addresses into index1 and index2.
Kristian Monsen25f61362010-05-21 11:50:48 +01007725 __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
7726 __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01007727
7728 // Swap elements.
7729 __ mov(object.reg(), Operand(index1.reg(), 0));
7730 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
7731 __ mov(Operand(index2.reg(), 0), object.reg());
7732 __ mov(Operand(index1.reg(), 0), tmp2.reg());
7733
7734 Label done;
7735 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
7736 // Possible optimization: do a check that both values are Smis
7737 // (or them and test against Smi mask.)
7738
7739 __ mov(tmp2.reg(), tmp1.reg());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007740 __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
7741 __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01007742 __ bind(&done);
7743
7744 deferred->BindExit();
7745 frame_->Push(Factory::undefined_value());
7746}
7747
7748
7749void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
7750 Comment cmnt(masm_, "[ GenerateCallFunction");
7751
7752 ASSERT(args->length() >= 2);
7753
7754 int n_args = args->length() - 2; // for receiver and function.
7755 Load(args->at(0)); // receiver
7756 for (int i = 0; i < n_args; i++) {
7757 Load(args->at(i + 1));
7758 }
7759 Load(args->at(n_args + 1)); // function
7760 Result result = frame_->CallJSFunction(n_args);
7761 frame_->Push(&result);
7762}
7763
7764
7765// Generates the Math.pow method. Only handles special cases and
7766// branches to the runtime system for everything else. Please note
7767// that this function assumes that the callsite has executed ToNumber
7768// on both arguments.
7769void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
7770 ASSERT(args->length() == 2);
7771 Load(args->at(0));
7772 Load(args->at(1));
7773 if (!CpuFeatures::IsSupported(SSE2)) {
7774 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
7775 frame_->Push(&res);
7776 } else {
7777 CpuFeatures::Scope use_sse2(SSE2);
7778 Label allocate_return;
7779 // Load the two operands while leaving the values on the frame.
7780 frame()->Dup();
7781 Result exponent = frame()->Pop();
7782 exponent.ToRegister();
7783 frame()->Spill(exponent.reg());
7784 frame()->PushElementAt(1);
7785 Result base = frame()->Pop();
7786 base.ToRegister();
7787 frame()->Spill(base.reg());
7788
7789 Result answer = allocator()->Allocate();
7790 ASSERT(answer.is_valid());
7791 ASSERT(!exponent.reg().is(base.reg()));
7792 JumpTarget call_runtime;
7793
7794 // Save 1 in xmm3 - we need this several times later on.
7795 __ mov(answer.reg(), Immediate(1));
7796 __ cvtsi2sd(xmm3, Operand(answer.reg()));
7797
7798 Label exponent_nonsmi;
7799 Label base_nonsmi;
7800 // If the exponent is a heap number go to that specific case.
7801 __ test(exponent.reg(), Immediate(kSmiTagMask));
7802 __ j(not_zero, &exponent_nonsmi);
7803 __ test(base.reg(), Immediate(kSmiTagMask));
7804 __ j(not_zero, &base_nonsmi);
7805
7806 // Optimized version when y is an integer.
7807 Label powi;
7808 __ SmiUntag(base.reg());
7809 __ cvtsi2sd(xmm0, Operand(base.reg()));
7810 __ jmp(&powi);
7811 // The exponent is a smi and the base is a heap number.
7812 __ bind(&base_nonsmi);
7813 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7814 Factory::heap_number_map());
7815 call_runtime.Branch(not_equal);
7816
7817 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7818
7819 // Optimized version of pow if y is an integer.
7820 __ bind(&powi);
7821 __ SmiUntag(exponent.reg());
7822
7823 // Save exponent in base as we need to check if exponent is negative later.
7824 // We know that base and exponent are in different registers.
7825 __ mov(base.reg(), exponent.reg());
7826
7827 // Get absolute value of exponent.
7828 Label no_neg;
7829 __ cmp(exponent.reg(), 0);
7830 __ j(greater_equal, &no_neg);
7831 __ neg(exponent.reg());
7832 __ bind(&no_neg);
7833
7834 // Load xmm1 with 1.
7835 __ movsd(xmm1, xmm3);
7836 Label while_true;
7837 Label no_multiply;
7838
7839 __ bind(&while_true);
7840 __ shr(exponent.reg(), 1);
7841 __ j(not_carry, &no_multiply);
7842 __ mulsd(xmm1, xmm0);
7843 __ bind(&no_multiply);
7844 __ test(exponent.reg(), Operand(exponent.reg()));
7845 __ mulsd(xmm0, xmm0);
7846 __ j(not_zero, &while_true);
7847
7848 // base.reg() holds the original exponent; if it is negative, return 1/result.
7849 __ test(base.reg(), Operand(base.reg()));
7850 __ j(positive, &allocate_return);
7851 // Special case if xmm1 has reached infinity.
7852 __ mov(answer.reg(), Immediate(0x7FB00000));
7853 __ movd(xmm0, Operand(answer.reg()));
7854 __ cvtss2sd(xmm0, xmm0);
7855 __ ucomisd(xmm0, xmm1);
7856 call_runtime.Branch(equal);
7857 __ divsd(xmm3, xmm1);
7858 __ movsd(xmm1, xmm3);
7859 __ jmp(&allocate_return);
7860
7861 // The exponent (or both operands) is a heap number - in any case we now
7862 // work on doubles.
7863 __ bind(&exponent_nonsmi);
7864 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
7865 Factory::heap_number_map());
7866 call_runtime.Branch(not_equal);
7867 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
7868 // Test if exponent is nan.
7869 __ ucomisd(xmm1, xmm1);
7870 call_runtime.Branch(parity_even);
7871
7872 Label base_not_smi;
7873 Label handle_special_cases;
7874 __ test(base.reg(), Immediate(kSmiTagMask));
7875 __ j(not_zero, &base_not_smi);
7876 __ SmiUntag(base.reg());
7877 __ cvtsi2sd(xmm0, Operand(base.reg()));
7878 __ jmp(&handle_special_cases);
7879 __ bind(&base_not_smi);
7880 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7881 Factory::heap_number_map());
7882 call_runtime.Branch(not_equal);
7883 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
7884 __ and_(answer.reg(), HeapNumber::kExponentMask);
7885 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
7886 // base is NaN or +/-Infinity
7887 call_runtime.Branch(greater_equal);
7888 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7889
7890 // base is in xmm0 and exponent is in xmm1.
7891 __ bind(&handle_special_cases);
7892 Label not_minus_half;
7893 // Test for -0.5.
7894 // Load xmm2 with -0.5.
7895 __ mov(answer.reg(), Immediate(0xBF000000));
7896 __ movd(xmm2, Operand(answer.reg()));
7897 __ cvtss2sd(xmm2, xmm2);
7898 // xmm2 now has -0.5.
7899 __ ucomisd(xmm2, xmm1);
7900 __ j(not_equal, &not_minus_half);
7901
7902 // Calculates the reciprocal of the square root.
7903 // Note that 1/sqrt(x) = sqrt(1/x).
7904 __ divsd(xmm3, xmm0);
7905 __ movsd(xmm1, xmm3);
7906 __ sqrtsd(xmm1, xmm1);
7907 __ jmp(&allocate_return);
7908
7909 // Test for 0.5.
7910 __ bind(&not_minus_half);
7911 // Load xmm2 with 0.5.
7912 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
7913 __ addsd(xmm2, xmm3);
7914 // xmm2 now has 0.5.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007915 __ ucomisd(xmm2, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +01007916 call_runtime.Branch(not_equal);
7917 // Calculates square root.
7918 __ movsd(xmm1, xmm0);
7919 __ sqrtsd(xmm1, xmm1);
7920
7921 JumpTarget done;
7922 Label failure, success;
7923 __ bind(&allocate_return);
7924 // Make a copy of the frame to enable us to handle allocation
7925 // failure after the JumpTarget jump.
7926 VirtualFrame* clone = new VirtualFrame(frame());
7927 __ AllocateHeapNumber(answer.reg(), exponent.reg(),
7928 base.reg(), &failure);
7929 __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
7930 // Remove the two original values from the frame - we only need those
7931 // in the case where we branch to runtime.
7932 frame()->Drop(2);
7933 exponent.Unuse();
7934 base.Unuse();
7935 done.Jump(&answer);
7936 // Use the copy of the original frame as our current frame.
7937 RegisterFile empty_regs;
7938 SetFrame(clone, &empty_regs);
7939 // If we experience an allocation failure we branch to runtime.
7940 __ bind(&failure);
7941 call_runtime.Bind();
7942 answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);
7943
7944 done.Bind(&answer);
7945 frame()->Push(&answer);
7946 }
7947}
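
// The "powi" loop above is binary exponentiation (square-and-multiply),
// with a final reciprocal for negative exponents. An equivalent C++ sketch:
//
//   double PowInt(double base, int exponent) {
//     unsigned e = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
//                               : static_cast<unsigned>(exponent);
//     double result = 1.0;
//     for (; e != 0; e >>= 1) {
//       if (e & 1) result *= base;  // the "no_multiply" branch skips this
//       base *= base;               // squared unconditionally, as above
//     }
//     return exponent < 0 ? 1.0 / result : result;
//   }
//
// As in the generated code, the negative-exponent case can overflow to
// infinity before the reciprocal is taken, which is why the stub bails out
// to the runtime when xmm1 reaches infinity.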
7948
7949
Andrei Popescu402d9372010-02-26 13:31:12 +00007950void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
7951 ASSERT_EQ(args->length(), 1);
7952 Load(args->at(0));
7953 TranscendentalCacheStub stub(TranscendentalCache::SIN);
7954 Result result = frame_->CallStub(&stub, 1);
7955 frame_->Push(&result);
7956}
7957
7958
7959void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
7960 ASSERT_EQ(args->length(), 1);
7961 Load(args->at(0));
7962 TranscendentalCacheStub stub(TranscendentalCache::COS);
7963 Result result = frame_->CallStub(&stub, 1);
7964 frame_->Push(&result);
7965}
7966
7967
Steve Block6ded16b2010-05-10 14:33:55 +01007968// Generates the Math.sqrt method. Please note - this function assumes that
7969// the callsite has executed ToNumber on the argument.
7970void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
7971 ASSERT_EQ(args->length(), 1);
7972 Load(args->at(0));
7973
7974 if (!CpuFeatures::IsSupported(SSE2)) {
7975 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
7976 frame()->Push(&result);
7977 } else {
7978 CpuFeatures::Scope use_sse2(SSE2);
7979 // Leave original value on the frame if we need to call runtime.
7980 frame()->Dup();
7981 Result result = frame()->Pop();
7982 result.ToRegister();
7983 frame()->Spill(result.reg());
7984 Label runtime;
7985 Label non_smi;
7986 Label load_done;
7987 JumpTarget end;
7988
7989 __ test(result.reg(), Immediate(kSmiTagMask));
7990 __ j(not_zero, &non_smi);
7991 __ SmiUntag(result.reg());
7992 __ cvtsi2sd(xmm0, Operand(result.reg()));
7993 __ jmp(&load_done);
7994 __ bind(&non_smi);
7995 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
7996 Factory::heap_number_map());
7997 __ j(not_equal, &runtime);
7998 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
7999
8000 __ bind(&load_done);
8001 __ sqrtsd(xmm0, xmm0);
8002 // A copy of the virtual frame to allow us to go to runtime after the
8003 // JumpTarget jump.
8004 Result scratch = allocator()->Allocate();
8005 VirtualFrame* clone = new VirtualFrame(frame());
8006 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);
8007
8008 __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
8009 frame()->Drop(1);
8010 scratch.Unuse();
8011 end.Jump(&result);
8012 // We only branch to runtime if we have an allocation error.
8013 // Use the copy of the original frame as our current frame.
8014 RegisterFile empty_regs;
8015 SetFrame(clone, &empty_regs);
8016 __ bind(&runtime);
8017 result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
8018
8019 end.Bind(&result);
8020 frame()->Push(&result);
8021 }
8022}
8023
8024
Ben Murdochbb769b22010-08-11 14:56:33 +01008025void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
8026 ASSERT_EQ(2, args->length());
8027 Load(args->at(0));
8028 Load(args->at(1));
8029 Result right_res = frame_->Pop();
8030 Result left_res = frame_->Pop();
8031 right_res.ToRegister();
8032 left_res.ToRegister();
8033 Result tmp_res = allocator()->Allocate();
8034 ASSERT(tmp_res.is_valid());
8035 Register right = right_res.reg();
8036 Register left = left_res.reg();
8037 Register tmp = tmp_res.reg();
8038 right_res.Unuse();
8039 left_res.Unuse();
8040 tmp_res.Unuse();
8041 __ cmp(left, Operand(right));
8042 destination()->true_target()->Branch(equal);
8043 // Fail if either is a non-HeapObject.
8044 __ mov(tmp, left);
8045 __ and_(Operand(tmp), right);
8046 __ test(Operand(tmp), Immediate(kSmiTagMask));
8047 destination()->false_target()->Branch(equal);
8048 __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
8049 destination()->false_target()->Branch(not_equal);
8050 __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
8051 destination()->false_target()->Branch(not_equal);
8052 __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
8053 __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
8054 destination()->Split(equal);
8055}
8056
8057
Steve Blocka7e24c12009-10-30 11:49:00 +00008058void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008059 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008060 if (CheckForInlineRuntimeCall(node)) {
8061 return;
8062 }
8063
8064 ZoneList<Expression*>* args = node->arguments();
8065 Comment cmnt(masm_, "[ CallRuntime");
8066 Runtime::Function* function = node->function();
8067
8068 if (function == NULL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008069 // Push the builtins object found in the current global object.
8070 Result temp = allocator()->Allocate();
8071 ASSERT(temp.is_valid());
8072 __ mov(temp.reg(), GlobalObject());
8073 __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
8074 frame_->Push(&temp);
8075 }
8076
8077 // Push the arguments ("left-to-right").
8078 int arg_count = args->length();
8079 for (int i = 0; i < arg_count; i++) {
8080 Load(args->at(i));
8081 }
8082
8083 if (function == NULL) {
8084 // Call the JS runtime function.
Leon Clarkee46be812010-01-19 14:06:41 +00008085 frame_->Push(node->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00008086 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
8087 arg_count,
8088 loop_nesting_);
8089 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00008090 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00008091 } else {
8092 // Call the C runtime function.
8093 Result answer = frame_->CallRuntime(function, arg_count);
8094 frame_->Push(&answer);
8095 }
8096}
8097
8098
8099void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008100 Comment cmnt(masm_, "[ UnaryOperation");
8101
8102 Token::Value op = node->op();
8103
8104 if (op == Token::NOT) {
8105 // Swap the true and false targets but keep the same actual label
8106 // as the fall through.
8107 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00008108 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00008109 // Swap the labels back.
8110 destination()->Invert();
8111
8112 } else if (op == Token::DELETE) {
8113 Property* property = node->expression()->AsProperty();
8114 if (property != NULL) {
8115 Load(property->obj());
8116 Load(property->key());
8117 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
8118 frame_->Push(&answer);
8119 return;
8120 }
8121
8122 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
8123 if (variable != NULL) {
8124 Slot* slot = variable->slot();
8125 if (variable->is_global()) {
8126 LoadGlobal();
8127 frame_->Push(variable->name());
8128 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
8129 CALL_FUNCTION, 2);
8130 frame_->Push(&answer);
8131 return;
8132
8133 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
8134 // Call the runtime to look up the context holding the named
8135 // variable. Sync the virtual frame eagerly so we can push the
8136 // arguments directly into place.
8137 frame_->SyncRange(0, frame_->element_count() - 1);
8138 frame_->EmitPush(esi);
8139 frame_->EmitPush(Immediate(variable->name()));
8140 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
8141 ASSERT(context.is_register());
8142 frame_->EmitPush(context.reg());
8143 context.Unuse();
8144 frame_->EmitPush(Immediate(variable->name()));
8145 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
8146 CALL_FUNCTION, 2);
8147 frame_->Push(&answer);
8148 return;
8149 }
8150
8151 // Default: Deleting a non-global variable that was not dynamically
8152 // introduced yields false.
8153 frame_->Push(Factory::false_value());
8154
8155 } else {
8156 // Default: Result of deleting expressions is true.
8157 Load(node->expression()); // may have side-effects
8158 frame_->SetElementAt(0, Factory::true_value());
8159 }
8160
8161 } else if (op == Token::TYPEOF) {
8162 // Special case for loading the typeof expression; see comment on
8163 // LoadTypeofExpression().
8164 LoadTypeofExpression(node->expression());
8165 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
8166 frame_->Push(&answer);
8167
8168 } else if (op == Token::VOID) {
8169 Expression* expression = node->expression();
8170 if (expression && expression->AsLiteral() && (
8171 expression->AsLiteral()->IsTrue() ||
8172 expression->AsLiteral()->IsFalse() ||
8173 expression->AsLiteral()->handle()->IsNumber() ||
8174 expression->AsLiteral()->handle()->IsString() ||
8175 expression->AsLiteral()->handle()->IsJSRegExp() ||
8176 expression->AsLiteral()->IsNull())) {
8177 // Omit evaluating the value of the primitive literal.
8178 // It will be discarded anyway, and can have no side effect.
8179 frame_->Push(Factory::undefined_value());
8180 } else {
8181 Load(node->expression());
8182 frame_->SetElementAt(0, Factory::undefined_value());
8183 }
8184
8185 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008186 if (in_safe_int32_mode()) {
8187 Visit(node->expression());
8188 Result value = frame_->Pop();
8189 ASSERT(value.is_untagged_int32());
8190 // Registers containing an int32 value are not multiply used.
8191 ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
8192 value.ToRegister();
8193 switch (op) {
8194 case Token::SUB: {
8195 __ neg(value.reg());
8196 if (node->no_negative_zero()) {
8197 // -MIN_INT is MIN_INT with the overflow flag set.
8198 unsafe_bailout_->Branch(overflow);
8199 } else {
8200 // MIN_INT and 0 both have bad negations: negating 0 yields -0 and
// negating MIN_INT overflows. Both values have their low 31 bits clear,
// so testing against 0x7FFFFFFF detects exactly these two cases.
8201 __ test(value.reg(), Immediate(0x7FFFFFFF));
8202 unsafe_bailout_->Branch(zero);
8203 }
8204 break;
8205 }
8206 case Token::BIT_NOT: {
8207 __ not_(value.reg());
8208 break;
8209 }
8210 case Token::ADD: {
8211 // Unary plus has no effect on int32 values.
8212 break;
8213 }
8214 default:
8215 UNREACHABLE();
8216 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008217 }
Steve Block6ded16b2010-05-10 14:33:55 +01008218 frame_->Push(&value);
8219 } else {
8220 Load(node->expression());
Leon Clarkeac952652010-07-15 11:15:24 +01008221 bool can_overwrite =
Steve Block6ded16b2010-05-10 14:33:55 +01008222 (node->expression()->AsBinaryOperation() != NULL &&
8223 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
Leon Clarkeac952652010-07-15 11:15:24 +01008224 UnaryOverwriteMode overwrite =
8225 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
8226 bool no_negative_zero = node->expression()->no_negative_zero();
Steve Block6ded16b2010-05-10 14:33:55 +01008227 switch (op) {
8228 case Token::NOT:
8229 case Token::DELETE:
8230 case Token::TYPEOF:
8231 UNREACHABLE(); // handled above
8232 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00008233
Steve Block6ded16b2010-05-10 14:33:55 +01008234 case Token::SUB: {
Leon Clarkeac952652010-07-15 11:15:24 +01008235 GenericUnaryOpStub stub(
8236 Token::SUB,
8237 overwrite,
8238 no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
Steve Block6ded16b2010-05-10 14:33:55 +01008239 Result operand = frame_->Pop();
8240 Result answer = frame_->CallStub(&stub, &operand);
8241 answer.set_type_info(TypeInfo::Number());
8242 frame_->Push(&answer);
8243 break;
8244 }
8245 case Token::BIT_NOT: {
8246 // Smi check.
8247 JumpTarget smi_label;
8248 JumpTarget continue_label;
8249 Result operand = frame_->Pop();
8250 TypeInfo operand_info = operand.type_info();
8251 operand.ToRegister();
8252 if (operand_info.IsSmi()) {
8253 if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
8254 frame_->Spill(operand.reg());
8255 // Set smi tag bit. It will be reset by the not operation.
8256 __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
8257 __ not_(operand.reg());
8258 Result answer = operand;
8259 answer.set_type_info(TypeInfo::Smi());
8260 frame_->Push(&answer);
8261 } else {
8262 __ test(operand.reg(), Immediate(kSmiTagMask));
8263 smi_label.Branch(zero, &operand, taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00008264
Steve Block6ded16b2010-05-10 14:33:55 +01008265 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
8266 Result answer = frame_->CallStub(&stub, &operand);
8267 continue_label.Jump(&answer);
Leon Clarkee46be812010-01-19 14:06:41 +00008268
Steve Block6ded16b2010-05-10 14:33:55 +01008269 smi_label.Bind(&answer);
8270 answer.ToRegister();
8271 frame_->Spill(answer.reg());
8272 // Set smi tag bit. It will be reset by the not operation.
8273 __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
8274 __ not_(answer.reg());
Leon Clarkee46be812010-01-19 14:06:41 +00008275
Steve Block6ded16b2010-05-10 14:33:55 +01008276 continue_label.Bind(&answer);
8277 answer.set_type_info(TypeInfo::Integer32());
8278 frame_->Push(&answer);
8279 }
8280 break;
8281 }
8282 case Token::ADD: {
8283 // Smi check.
8284 JumpTarget continue_label;
8285 Result operand = frame_->Pop();
8286 TypeInfo operand_info = operand.type_info();
8287 operand.ToRegister();
8288 __ test(operand.reg(), Immediate(kSmiTagMask));
8289 continue_label.Branch(zero, &operand, taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00008290
Steve Block6ded16b2010-05-10 14:33:55 +01008291 frame_->Push(&operand);
8292 Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
Steve Blocka7e24c12009-10-30 11:49:00 +00008293 CALL_FUNCTION, 1);
8294
Steve Block6ded16b2010-05-10 14:33:55 +01008295 continue_label.Bind(&answer);
8296 if (operand_info.IsSmi()) {
8297 answer.set_type_info(TypeInfo::Smi());
8298 } else if (operand_info.IsInteger32()) {
8299 answer.set_type_info(TypeInfo::Integer32());
8300 } else {
8301 answer.set_type_info(TypeInfo::Number());
8302 }
8303 frame_->Push(&answer);
8304 break;
8305 }
8306 default:
8307 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00008308 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008309 }
8310 }
8311}
8312
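// Note (illustrative): the kIgnoreNegativeZero / kStrictNegativeZero
// distinction above matters because in JavaScript -x for x == 0 produces
// -0.0, which has no smi representation:
//
//   var x = 0;
//   var y = -x;   // y is -0.0, a heap number, not the smi 0.
//   1 / y         // -Infinity, observably different from 1 / 0.
//
// When static analysis proves that -0 cannot be observed, the stub may
// return the smi 0 instead of allocating a heap number.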

// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation, call
// into the runtime to convert the argument to a number, and call the
// specialized add or subtract stub. The result is left in dst.
class DeferredPrefixCountOperation: public DeferredCode {
 public:
  DeferredPrefixCountOperation(Register dst,
                               bool is_increment,
                               TypeInfo input_type)
      : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPrefixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
}

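// Note (illustrative): the undo above relies on add and sub being exact
// inverses modulo 2^32, so even an increment that overflowed is reversed
// precisely. Smi::FromInt(1) is the immediate 2 on ia32 (value shifted
// left by the tag size). For example, with the usual 31-bit smis:
//
//   dst = 0x7FFFFFFE   // smi encoding of 2^30 - 1, the largest smi.
//   add dst, 2         // 0x80000000, overflow -> deferred code runs.
//   sub dst, 2         // 0x7FFFFFFE, original value restored.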

// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation and call
// into the runtime to convert the argument to a number. Update the
// original value in old. Call the specialized add or subtract stub.
// The result is left in dst.
class DeferredPostfixCountOperation: public DeferredCode {
 public:
  DeferredPostfixCountOperation(Register dst,
                                Register old,
                                bool is_increment,
                                TypeInfo input_type)
      : dst_(dst),
        old_(old),
        is_increment_(is_increment),
        input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  Register old_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPostfixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    __ push(dst_);  // Save the input to use as the old value.
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    __ push(eax);  // Save the result of ToNumber to use as the old value.
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ pop(old_);
}

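// Note (illustrative): the extra push/pop pair above exists because
// postfix operators return ToNumber(input), not the input itself:
//
//   var a = "5";
//   var b = a++;   // b is the number 5 (not the string "5"); a is 6.
//
// The prefix variant needs no old value, so it skips the save entirely.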

void CodeGenerator::VisitCountOperation(CountOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CountOperation");

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);

  // Postfix operations need a stack slot under the reference to hold
  // the old value while the new value is being stored. This is so that
  // in the case that storing the new value requires a call, the old
  // value will be in the frame to be spilled.
  if (is_postfix) frame_->Push(Smi::FromInt(0));

  // A constant reference is not saved to, so a constant reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!is_postfix) frame_->Push(Smi::FromInt(0));
      return;
    }
    target.TakeValue();

    Result new_value = frame_->Pop();
    new_value.ToRegister();

    Result old_value;  // Only allocated in the postfix case.
    if (is_postfix) {
      // Allocate a temporary to preserve the old value.
      old_value = allocator_->Allocate();
      ASSERT(old_value.is_valid());
      __ mov(old_value.reg(), new_value.reg());

      // The return value for postfix operations is ToNumber(input).
      // Keep more precise type info if the input is some kind of
      // number already. If the input is not a number we have to wait
      // for the deferred code to convert it.
      if (new_value.type_info().IsNumber()) {
        old_value.set_type_info(new_value.type_info());
      }
    }

    // Ensure the new value is writable.
    frame_->Spill(new_value.reg());

    Result tmp;
    if (new_value.is_smi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
    } else {
      // We don't know statically if the input is a smi.
      // In order to combine the overflow and the smi tag check, we need
      // to be able to allocate a byte register. We attempt to do so
      // without spilling. If we fail, we will generate separate overflow
      // and smi tag checks.
      // We allocate and clear a temporary byte register before performing
      // the count operation since clearing the register using xor will clear
      // the overflow flag.
      tmp = allocator_->AllocateByteRegisterWithoutSpilling();
      if (tmp.is_valid()) {
        __ Set(tmp.reg(), Immediate(0));
      }
    }

    if (is_increment) {
      __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    } else {
      __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    }

    DeferredCode* deferred = NULL;
    if (is_postfix) {
      deferred = new DeferredPostfixCountOperation(new_value.reg(),
                                                   old_value.reg(),
                                                   is_increment,
                                                   new_value.type_info());
    } else {
      deferred = new DeferredPrefixCountOperation(new_value.reg(),
                                                  is_increment,
                                                  new_value.type_info());
    }

    if (new_value.is_smi()) {
      // In case we have a smi as input just check for overflow.
      deferred->Branch(overflow);
    } else {
      // If the count operation didn't overflow and the result is a valid
      // smi, we're done. Otherwise, we jump to the deferred slow-case
      // code.
      // We combine the overflow and the smi tag check if we could
      // successfully allocate a temporary byte register.
      if (tmp.is_valid()) {
        __ setcc(overflow, tmp.reg());
        __ or_(Operand(tmp.reg()), new_value.reg());
        __ test(tmp.reg(), Immediate(kSmiTagMask));
        tmp.Unuse();
        deferred->Branch(not_zero);
      } else {
        // Otherwise we test separately for overflow and smi tag.
        deferred->Branch(overflow);
        __ test(new_value.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      }
    }
    deferred->BindExit();

    // Postfix count operations return their input converted to
    // number. The case when the input is already a number is covered
    // above in the allocation code for old_value.
    if (is_postfix && !new_value.type_info().IsNumber()) {
      old_value.set_type_info(TypeInfo::Number());
    }

    // The result of ++ or -- is an Integer32 if the
    // input is a smi. Otherwise it is a number.
    if (new_value.is_smi()) {
      new_value.set_type_info(TypeInfo::Integer32());
    } else {
      new_value.set_type_info(TypeInfo::Number());
    }

    // Postfix: store the old value in the allocated slot under the
    // reference.
    if (is_postfix) frame_->SetElementAt(target.size(), &old_value);

    frame_->Push(&new_value);
    // Non-constant: update the reference.
    if (!is_const) target.SetValue(NOT_CONST_INIT);
  }

  // Postfix: drop the new value and use the old.
  if (is_postfix) frame_->Drop();
}

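// Note (illustrative): the combined check above folds two branches into
// one. setcc writes 1 into the byte register iff the optimistic add/sub
// overflowed; or-ing in the result then merges the smi tag bit (bit 0,
// which is 0 for smis and 1 for heap object pointers):
//
//   tmp = overflow ? 1 : 0   // setcc after the optimistic add/sub.
//   tmp |= new_value         // bit 0 is now (overflow | not-a-smi).
//   test tmp, kSmiTagMask    // one branch covers both failure modes.
//
// This is why tmp must be a byte-addressable register (setcc needs one)
// and why it is cleared before the increment: clearing it afterwards
// could use a flag-clobbering xor and destroy the overflow bit.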

void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
  Token::Value op = node->op();
  Comment cmnt(masm_, "[ Int32BinaryOperation");
  ASSERT(in_safe_int32_mode());
  ASSERT(safe_int32_mode_enabled());
  ASSERT(FLAG_safe_int32_compiler);

  if (op == Token::COMMA) {
    // Discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  ASSERT(right.is_untagged_int32());
  ASSERT(left.is_untagged_int32());
  // Registers containing an int32 value are not multiply used.
  ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
  ASSERT(!right.is_register() || !frame_->is_used(right.reg()));

  switch (op) {
    case Token::COMMA:
    case Token::OR:
    case Token::AND:
      UNREACHABLE();
      break;
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
      if (left.is_constant() || right.is_constant()) {
        int32_t value;  // Put constant in value, non-constant in left.
        // Constants are known to be int32 values, from static analysis,
        // or else will be converted to int32 by implicit ECMA [[ToInt32]].
        if (left.is_constant()) {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        } else {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        }

        left.ToRegister();
        if (op == Token::BIT_OR) {
          __ or_(Operand(left.reg()), Immediate(value));
        } else if (op == Token::BIT_XOR) {
          __ xor_(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(Operand(left.reg()), Immediate(value));
        }
      } else {
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::BIT_OR) {
          __ or_(left.reg(), Operand(right.reg()));
        } else if (op == Token::BIT_XOR) {
          __ xor_(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(left.reg(), Operand(right.reg()));
        }
      }
      frame_->Push(&left);
      right.Unuse();
      break;
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      bool test_shr_overflow = false;
      left.ToRegister();
      if (right.is_constant()) {
        ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
        int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
        if (op == Token::SAR) {
          __ sar(left.reg(), shift_amount);
        } else if (op == Token::SHL) {
          __ shl(left.reg(), shift_amount);
        } else {
          ASSERT(op == Token::SHR);
          __ shr(left.reg(), shift_amount);
          if (shift_amount == 0) test_shr_overflow = true;
        }
      } else {
        // Move right to ecx.
        if (left.is_register() && left.reg().is(ecx)) {
          right.ToRegister();
          __ xchg(left.reg(), right.reg());
          left = right;  // Left is unused here, copy of right unused by Push.
        } else {
          right.ToRegister(ecx);
          left.ToRegister();
        }
        if (op == Token::SAR) {
          __ sar_cl(left.reg());
        } else if (op == Token::SHL) {
          __ shl_cl(left.reg());
        } else {
          ASSERT(op == Token::SHR);
          __ shr_cl(left.reg());
          test_shr_overflow = true;
        }
      }
      {
        Register left_reg = left.reg();
        frame_->Push(&left);
        right.Unuse();
        if (test_shr_overflow && !node->to_int32()) {
          // Uint32 results with top bit set are not Int32 values.
          // If they will be forced to Int32, skip the test.
          // Test is needed because shr with shift amount 0 does not set flags.
          __ test(left_reg, Operand(left_reg));
          unsafe_bailout_->Branch(sign);
        }
      }
      break;
    }
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
      if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
        int32_t value;  // Put constant in value, non-constant in left.
        if (right.is_constant()) {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        } else {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        }

        left.ToRegister();
        if (op == Token::ADD) {
          __ add(Operand(left.reg()), Immediate(value));
        } else if (op == Token::SUB) {
          __ sub(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::MUL);
          __ imul(left.reg(), left.reg(), value);
        }
      } else {
        left.ToRegister();
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::ADD) {
          __ add(left.reg(), Operand(right.reg()));
        } else if (op == Token::SUB) {
          __ sub(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::MUL);
          // We have statically verified that a negative zero can be ignored.
          __ imul(left.reg(), Operand(right.reg()));
        }
      }
      right.Unuse();
      frame_->Push(&left);
      if (!node->to_int32()) {
        // If ToInt32 is called on the result of ADD, SUB, or MUL, we don't
        // care about overflows.
        unsafe_bailout_->Branch(overflow);
      }
      break;
    case Token::DIV:
    case Token::MOD: {
      if (right.is_register() && (right.reg().is(eax) || right.reg().is(edx))) {
        if (left.is_register() && left.reg().is(edi)) {
          right.ToRegister(ebx);
        } else {
          right.ToRegister(edi);
        }
      }
      left.ToRegister(eax);
      Result edx_reg = allocator_->Allocate(edx);
      right.ToRegister();
      // The results are unused here because BreakTarget::Branch cannot handle
      // live results.
      Register right_reg = right.reg();
      left.Unuse();
      right.Unuse();
      edx_reg.Unuse();
      __ cmp(right_reg, 0);
      // Ensure divisor is positive: no chance of non-int32 or -0 result.
      unsafe_bailout_->Branch(less_equal);
      __ cdq();  // Sign-extend eax into edx:eax.
      __ idiv(right_reg);
      if (op == Token::MOD) {
        // Negative zero can arise as a negative dividend with a zero result.
        if (!node->no_negative_zero()) {
          Label not_negative_zero;
          __ test(edx, Operand(edx));
          __ j(not_zero, &not_negative_zero);
          __ test(eax, Operand(eax));
          unsafe_bailout_->Branch(negative);
          __ bind(&not_negative_zero);
        }
        Result edx_result(edx, TypeInfo::Integer32());
        edx_result.set_untagged_int32(true);
        frame_->Push(&edx_result);
      } else {
        ASSERT(op == Token::DIV);
        __ test(edx, Operand(edx));
        unsafe_bailout_->Branch(not_equal);
        Result eax_result(eax, TypeInfo::Integer32());
        eax_result.set_untagged_int32(true);
        frame_->Push(&eax_result);
      }
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}

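// Note (illustrative): the sign-bit bailout for SHR above reflects
// JavaScript's >>> operator, which produces a uint32. Results with the
// top bit set (e.g. -1 >>> 0 == 4294967295) do not fit in a signed
// int32 register, so this mode must bail out unless the result is
// immediately coerced back to int32, as in (x >>> y) | 0.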

void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions. This means that the value
  // produced by a && or || operator is not necessarily a boolean.

  // NOTE: If the left hand side produces a materialized value (not
  // control flow), we force the right hand side to do the same. This
  // is necessary because we assume that if we get control flow on the
  // last path out of an expression we got it on all paths.
  if (node->op() == Token::AND) {
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_true;
    ControlDestination dest(&is_true, destination()->false_target(), true);
    LoadCondition(node->left(), &dest, false);

    if (dest.false_was_fall_through()) {
      // The current false target was used as the fall-through. If
      // there are no dangling jumps to is_true then the left
      // subexpression was unconditionally false. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_true.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current false target was a forward jump then we have a
        // valid frame, we have just bound the false target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->false_target()->Unuse();
          destination()->false_target()->Jump();
        }
        is_true.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have actually just jumped to or bound the current false
        // target but the current control destination is not marked as
        // used.
        destination()->Use(false);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_true
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_true
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&pop_and_continue, &exit, true);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else {
    ASSERT(node->op() == Token::OR);
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_false;
    ControlDestination dest(destination()->true_target(), &is_false, false);
    LoadCondition(node->left(), &dest, false);

    if (dest.true_was_fall_through()) {
      // The current true target was used as the fall-through. If
      // there are no dangling jumps to is_false then the left
      // subexpression was unconditionally true. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_false.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current true target was a forward jump then we have a
        // valid frame, we have just bound the true target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->true_target()->Unuse();
          destination()->true_target()->Jump();
        }
        is_false.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have just jumped to or bound the current true target but
        // the current control destination is not marked as used.
        destination()->Use(true);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_false
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_false
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&exit, &pop_and_continue, false);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }
  }
}

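// Note (illustrative): the value-producing behaviour handled above is
// observable in ordinary JavaScript:
//
//   var name = input || "default";  // yields input itself, or "default".
//   var f = obj && obj.callback;    // yields obj (if falsy) or obj.callback.
//
// Neither expression yields a boolean, which is why the code keeps the
// original (duplicated) value on the frame and only pops it after the
// ToBoolean test decides which branch to take.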

void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else if (in_safe_int32_mode()) {
    Visit(node->left());
    Visit(node->right());
    Int32BinaryOperation(node);
  } else {
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
    OverwriteMode overwrite_mode = NO_OVERWRITE;
    if (node->left()->AsBinaryOperation() != NULL &&
        node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_LEFT;
    } else if (node->right()->AsBinaryOperation() != NULL &&
               node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
      overwrite_mode = OVERWRITE_RIGHT;
    }

    if (node->left()->IsTrivial()) {
      Load(node->right());
      Result right = frame_->Pop();
      frame_->Push(node->left());
      frame_->Push(&right);
    } else {
      Load(node->left());
      Load(node->right());
    }
    GenericBinaryOperation(node, overwrite_mode);
  }
}

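// Note (illustrative): the overwrite modes chosen above let
// GenericBinaryOperation reuse a temporary heap number instead of
// allocating a fresh one. In an expression like (a * b) + c, the value
// of (a * b) is an unobservable temporary, so the ADD may overwrite it
// in place (OVERWRITE_LEFT); a named variable must never be overwritten.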

void CodeGenerator::VisitThisFunction(ThisFunction* node) {
  ASSERT(!in_safe_int32_mode());
  frame_->PushFunction();
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareOperation");

  bool left_already_loaded = false;

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();
  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Result answer = frame_->Pop();
    answer.ToRegister();

    if (check->Equals(Heap::number_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ cmp(answer.reg(), Factory::heap_number_map());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::string_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable string object.
      Result temp = allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
      temp.Unuse();
      answer.Unuse();
      destination()->Split(below);

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ cmp(answer.reg(), Factory::true_value());
      destination()->true_target()->Branch(equal);
      __ cmp(answer.reg(), Factory::false_value());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ cmp(answer.reg(), Factory::undefined_value());
      destination()->true_target()->Branch(equal);

      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable object.
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      answer.Unuse();
      destination()->Split(not_zero);

    } else if (check->Equals(Heap::function_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
      destination()->true_target()->Branch(equal);
      // Regular expressions are callable so typeof == 'function'.
      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
      answer.Unuse();
      destination()->Split(equal);
    } else if (check->Equals(Heap::object_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      __ cmp(answer.reg(), Factory::null_value());
      destination()->true_target()->Branch(equal);

      Result map = allocator()->Allocate();
      ASSERT(map.is_valid());
      // Regular expressions are typeof == 'function', not 'object'.
      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
      destination()->false_target()->Branch(equal);

      // It can be an undetectable object.
      __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      destination()->false_target()->Branch(not_zero);
      // Do a range test for JSObject type. We can't use
      // MacroAssembler::IsInstanceJSObjectType, because we are using a
      // ControlDestination, so we copy its implementation here.
      __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
      __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
      __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
      answer.Unuse();
      map.Unuse();
      destination()->Split(below_equal);
    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      answer.Unuse();
      destination()->Goto(false);
    }
    return;
  } else if (op == Token::LT &&
             right->AsLiteral() != NULL &&
             right->AsLiteral()->handle()->IsHeapNumber()) {
    Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
    if (check->value() == 2147483648.0) {  // 0x80000000.
      Load(left);
      left_already_loaded = true;
      Result lhs = frame_->Pop();
      lhs.ToRegister();
      __ test(lhs.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);  // All smis are less.
      Result scratch = allocator()->Allocate();
      ASSERT(scratch.is_valid());
      __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
      __ cmp(scratch.reg(), Factory::heap_number_map());
      JumpTarget not_a_number;
      not_a_number.Branch(not_equal, &lhs);
      __ mov(scratch.reg(),
             FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
      __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
      not_a_number.Branch(above_equal, &lhs);  // It's a negative NaN or -Inf.
      const uint32_t borderline_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
      scratch.Unuse();
      lhs.Unuse();
      destination()->true_target()->Branch(less);
      destination()->false_target()->Jump();

      not_a_number.Bind(&lhs);
      frame_->Push(&lhs);
    }
  }

  Condition cc = no_condition;
  bool strict = false;
  switch (op) {
    case Token::EQ_STRICT:
      strict = true;
      // Fall through.
    case Token::EQ:
      cc = equal;
      break;
    case Token::LT:
      cc = less;
      break;
    case Token::GT:
      cc = greater;
      break;
    case Token::LTE:
      cc = less_equal;
      break;
    case Token::GTE:
      cc = greater_equal;
      break;
    case Token::IN: {
      if (!left_already_loaded) Load(left);
      Load(right);
      Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
      frame_->Push(&answer);  // Push the result.
      return;
    }
    case Token::INSTANCEOF: {
      if (!left_already_loaded) Load(left);
      Load(right);
      InstanceofStub stub;
      Result answer = frame_->CallStub(&stub, 2);
      answer.ToRegister();
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      destination()->Split(zero);
      return;
    }
    default:
      UNREACHABLE();
  }

  if (left->IsTrivial()) {
    if (!left_already_loaded) {
      Load(right);
      Result right_result = frame_->Pop();
      frame_->Push(left);
      frame_->Push(&right_result);
    } else {
      Load(right);
    }
  } else {
    if (!left_already_loaded) Load(left);
    Load(right);
  }
  Comparison(node, cc, strict, destination());
}

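// Note (illustrative): the 2147483648.0 special case in
// VisitCompareOperation above serves patterns like
//
//   if (x < 2147483648) { ... }   // x < 2^31.
//
// Every smi is below 2^31, so smis branch true immediately; for heap
// numbers it suffices to compare the upper (sign-and-exponent) word of
// the double against the borderline (kExponentBias + 31) exponent
// instead of loading the full double, once negative NaN and -Infinity
// have been routed to the generic path.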

#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
      && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
      && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
      && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
      && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
}
#endif


// Emit a LoadIC call to get the value from receiver and leave it in
// dst.
class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  DeferredReferenceGetNamedValue(Register dst,
                                 Register receiver,
                                 Handle<String> name)
      : dst_(dst), receiver_(receiver), name_(name) {
    set_comment("[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Handle<String> name_;
};


void DeferredReferenceGetNamedValue::Generate() {
  if (!receiver_.is(eax)) {
    __ mov(eax, receiver_);
  }
  __ Set(ecx, Immediate(name_));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The call must be followed by a test eax instruction to indicate
  // that the inobject property case was inlined.
  //
  // Store the delta to the map check instruction here in the test
  // instruction. Use masm_-> instead of the __ macro since the
  // latter can't return a value.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::named_load_inline_miss, 1);

  if (!dst_.is(eax)) __ mov(dst_, eax);
}

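// Note (illustrative): the code above is one half of the inline-cache
// patching protocol used throughout this file. The inlined fast path
// roughly looks like
//
//   cmp [receiver + kMapOffset], <invalid map>   <- patch_site_
//   jne deferred
//   mov dst, [receiver + <invalid offset>]       <- patched later
//   ...
//   call LoadIC                                  // deferred slow path.
//   test eax, -delta                             <- encodes the distance.
//
// The IC reads the immediate of the test instruction to find the cmp,
// then patches in the real map and field offset so that later runs stay
// on the fast path.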

class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceGetKeyedValue(Register dst,
                                 Register receiver,
                                 Register key)
      : dst_(dst), receiver_(receiver), key_(key) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  if (!receiver_.is(eax)) {
    // Register eax is available for key.
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (!key_.is(edx)) {
    // Register edx is available for receiver.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
  } else {
    __ xchg(edx, eax);
  }
  // Calculate the delta from the IC call instruction to the map check
  // cmp instruction in the inlined version. This delta is stored in
  // a test(eax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the cmp instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);

  if (!dst_.is(eax)) __ mov(dst_, eax);
}

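// Note (illustrative): the shuffle above establishes the KeyedLoadIC
// calling convention used here, key in eax and receiver in edx, without
// clobbering either value. The three cases cover: eax free for the key,
// edx free for the receiver, and the fully swapped state (receiver in
// eax, key in edx), which a single xchg fixes.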

class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
                                 Register scratch)
      : value_(value),
        key_(key),
        receiver_(receiver),
        scratch_(scratch) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Register scratch_;
  Label patch_site_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
  // Move value_ to eax, key_ to ecx, and receiver_ to edx.
  Register old_value = value_;

  // First, move value to eax.
  if (!value_.is(eax)) {
    if (key_.is(eax)) {
      // Move key_ out of eax, preferably to ecx.
      if (!value_.is(ecx) && !receiver_.is(ecx)) {
        __ mov(ecx, key_);
        key_ = ecx;
      } else {
        __ mov(scratch_, key_);
        key_ = scratch_;
      }
    }
    if (receiver_.is(eax)) {
      // Move receiver_ out of eax, preferably to edx.
      if (!value_.is(edx) && !key_.is(edx)) {
        __ mov(edx, receiver_);
        receiver_ = edx;
      } else {
        // Both moves to scratch are from eax; also, no valid path hits both.
        __ mov(scratch_, receiver_);
        receiver_ = scratch_;
      }
    }
    __ mov(eax, value_);
    value_ = eax;
  }

  // Now value_ is in eax. Move the other two to the right positions.
  // We do not update the variables key_ and receiver_ to ecx and edx.
  if (key_.is(ecx)) {
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (key_.is(edx)) {
    if (receiver_.is(ecx)) {
      __ xchg(edx, ecx);
    } else {
      __ mov(ecx, key_);
      if (!receiver_.is(edx)) {
        __ mov(edx, receiver_);
      }
    }
  } else {  // Key is not in edx or ecx.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    __ mov(ecx, key_);
  }

  // Call the IC stub.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  // Restore value (returned from store IC) register.
  if (!old_value.is(eax)) __ mov(old_value, eax);
}

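// Note (illustrative): the corresponding KeyedStoreIC convention is
// value in eax, key in ecx, receiver in edx. The shuffle above frees
// eax first (evicting the key or receiver into ecx/edx or the
// caller-provided scratch register) and then routes the remaining two
// values, so no move ever overwrites a register that is still needed.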

Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Do not inline the inobject property case for loads from the global
  // object. Also do not inline for unoptimized code. This saves time in
  // the code generator. Unoptimized code is toplevel code or code that is
  // not in a loop.
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    Comment cmnt(masm(), "[ Load from named Property");
    frame()->Push(name);

    RelocInfo::Mode mode = is_contextual
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    result = frame()->CallLoadIC(mode);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined. Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the inobject property case.
    Comment cmnt(masm(), "[ Inlined named property load");
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    result = allocator()->Allocate();
    ASSERT(result.is_valid());
    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    __ bind(deferred->patch_site());
    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    deferred->Branch(not_equal);

    // The delta from the patch label to the load offset must be statically
    // known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
           LoadIC::kOffsetToLoadInstruction);
    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset. Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));

    __ IncrementCounter(&Counters::named_load_inline, 1);
    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}

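// Note (illustrative): kMaxInt is used above purely as a placeholder
// displacement. Any real field offset is small, but a small initial
// displacement could be encoded in 8 bits, leaving no room to patch in
// an arbitrary 32-bit offset later; kMaxInt forces the long encoding.
// The null map plays the same role for the cmp: it can never match, so
// the first execution always reaches the IC, which then patches in the
// real map and offset.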

Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    result = frame()->CallStoreIC(name, is_contextual);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined. Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;
    Label patch_site;

    // Get the value and receiver from the stack.
    Result value = frame()->Pop();
    value.ToRegister();
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    // Allocate result register.
    result = allocator()->Allocate();
    ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    slow.Branch(zero, &value, &receiver);

    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    __ bind(&patch_site);
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    slow.Branch(not_equal, &value, &receiver);

    // The delta from the patch label to the store offset must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
           StoreIC::kOffsetToStoreInstruction);

    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset. Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    __ mov(FieldOperand(receiver.reg(), offset), value.reg());
    __ mov(result.reg(), Operand(value.reg()));

    // Allocate scratch register for write barrier.
    Result scratch = allocator()->Allocate();
    ASSERT(scratch.is_valid());

    // The write barrier clobbers all input registers, so spill the
    // receiver and the value.
    frame_->Spill(receiver.reg());
    frame_->Spill(value.reg());

    // If the receiver and the value share a register allocate a new
    // register for the receiver.
    if (receiver.reg().is(value.reg())) {
      receiver = allocator()->Allocate();
      ASSERT(receiver.is_valid());
      __ mov(receiver.reg(), Operand(value.reg()));
    }

    // Update the write barrier. To save instructions in the inlined
    // version we do not filter smis.
    Label skip_write_barrier;
    __ InNewSpace(receiver.reg(), value.reg(), equal, &skip_write_barrier);
    int delta_to_record_write = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ lea(scratch.reg(), Operand(receiver.reg(), offset));
    __ RecordWriteHelper(receiver.reg(), scratch.reg(), value.reg());
    if (FLAG_debug_code) {
      __ mov(receiver.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(value.reg(), Immediate(BitCast<int32_t>(kZapValue)));
      __ mov(scratch.reg(), Immediate(BitCast<int32_t>(kZapValue)));
    }
    __ bind(&skip_write_barrier);
    value.Unuse();
    scratch.Unuse();
    receiver.Unuse();
    done.Jump(&result);

    slow.Bind(&value, &receiver);
    frame()->Push(&receiver);
    frame()->Push(&value);
    result = frame()->CallStoreIC(name, is_contextual);
    // Encode the offset to the map check instruction and the offset
    // to the write barrier store address computation in a test eax
    // instruction.
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site);
    __ test(eax,
            Immediate((delta_to_record_write << 16) | delta_to_patch_site));
    done.Bind(&result);
  }

  ASSERT_EQ(expected_height, frame()->height());
  return result;
}

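// Note (illustrative): unlike the loads, the inlined named store must
// communicate two code offsets to the IC, so both are packed into the
// 32-bit immediate of a single test instruction:
//
//   immediate = (delta_to_record_write << 16) | delta_to_patch_site
//
// e.g. deltas of 0x24 and 0x0C encode as 0x0024000C. This assumes both
// deltas fit in 16 bits, which holds for this short code sequence.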
9551
9552Result CodeGenerator::EmitKeyedLoad() {
9553#ifdef DEBUG
9554 int original_height = frame()->height();
9555#endif
9556 Result result;
9557 // Inline array load code if inside of a loop. We do not know the
9558 // receiver map yet, so we initially generate the code with a check
9559 // against an invalid map. In the inline cache code, we patch the map
9560 // check if appropriate.
Leon Clarked91b9f72010-01-27 17:25:45 +00009561 if (loop_nesting() > 0) {
9562 Comment cmnt(masm_, "[ Inlined load from keyed Property");
9563
Leon Clarked91b9f72010-01-27 17:25:45 +00009564 // Use a fresh temporary to load the elements without destroying
9565 // the receiver which is needed for the deferred slow case.
9566 Result elements = allocator()->Allocate();
9567 ASSERT(elements.is_valid());
9568
Leon Clarkef7060e22010-06-03 12:02:55 +01009569 Result key = frame_->Pop();
9570 Result receiver = frame_->Pop();
9571 key.ToRegister();
9572 receiver.ToRegister();
9573
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009574 // If key and receiver are shared registers on the frame, their values will
9575 // be automatically saved and restored when going to deferred code.
9576 // The result is in elements, which is guaranteed non-shared.
Leon Clarked91b9f72010-01-27 17:25:45 +00009577 DeferredReferenceGetKeyedValue* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009578 new DeferredReferenceGetKeyedValue(elements.reg(),
Leon Clarked91b9f72010-01-27 17:25:45 +00009579 receiver.reg(),
Andrei Popescu402d9372010-02-26 13:31:12 +00009580 key.reg());
Leon Clarked91b9f72010-01-27 17:25:45 +00009581
Andrei Popescu402d9372010-02-26 13:31:12 +00009582 __ test(receiver.reg(), Immediate(kSmiTagMask));
9583 deferred->Branch(zero);
Leon Clarked91b9f72010-01-27 17:25:45 +00009584
Leon Clarkef7060e22010-06-03 12:02:55 +01009585 // Check that the receiver has the expected map.
Leon Clarked91b9f72010-01-27 17:25:45 +00009586 // Initially, use an invalid map. The map is patched in the IC
9587 // initialization code.
9588 __ bind(deferred->patch_site());
9589 // Use masm-> here instead of the double underscore macro since extra
9590 // coverage code can interfere with the patching.
9591 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
Steve Block8defd9f2010-07-08 12:39:36 +01009592 Immediate(Factory::null_value()));
Leon Clarked91b9f72010-01-27 17:25:45 +00009593 deferred->Branch(not_equal);
9594
9595 // Check that the key is a smi.
Steve Block6ded16b2010-05-10 14:33:55 +01009596 if (!key.is_smi()) {
9597 __ test(key.reg(), Immediate(kSmiTagMask));
9598 deferred->Branch(not_zero);
9599 } else {
9600 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
9601 }
Leon Clarked91b9f72010-01-27 17:25:45 +00009602
Iain Merrick75681382010-08-19 15:07:18 +01009603 // Get the elements array from the receiver.
Leon Clarked91b9f72010-01-27 17:25:45 +00009604 __ mov(elements.reg(),
9605 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
Iain Merrick75681382010-08-19 15:07:18 +01009606 __ AssertFastElements(elements.reg());
Leon Clarked91b9f72010-01-27 17:25:45 +00009607
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009608 // Check that the key is within bounds.
9609 __ cmp(key.reg(),
Leon Clarked91b9f72010-01-27 17:25:45 +00009610 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
9611 deferred->Branch(above_equal);
9612
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009613 // Load and check that the result is not the hole.
9614 // Key holds a smi.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01009615 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009616 __ mov(elements.reg(),
9617 FieldOperand(elements.reg(),
9618 key.reg(),
9619 times_2,
9620 FixedArray::kHeaderSize));
9621 result = elements;
Andrei Popescu402d9372010-02-26 13:31:12 +00009622 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
Leon Clarked91b9f72010-01-27 17:25:45 +00009623 deferred->Branch(equal);
9624 __ IncrementCounter(&Counters::keyed_load_inline, 1);
9625
9626 deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load. The explicit nop instruction is here because
    // the push that follows might be peephole-optimized away.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}


Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    Comment cmnt(masm(), "[ Inlined store to keyed Property");

    // Get the receiver, key and value into registers.
    result = frame()->Pop();
    Result key = frame()->Pop();
    Result receiver = frame()->Pop();

    Result tmp = allocator_->Allocate();
    ASSERT(tmp.is_valid());
    Result tmp2 = allocator_->Allocate();
    ASSERT(tmp2.is_valid());

    // Determine whether the value is a constant before putting it in a
    // register.
    bool value_is_constant = result.is_constant();

    // Make sure that value, key and receiver are in registers.
    result.ToRegister();
    key.ToRegister();
    receiver.ToRegister();

    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(result.reg(),
                                           key.reg(),
                                           receiver.reg(),
                                           tmp.reg());

    // Check that the receiver is not a smi.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Check that the receiver is a JSArray.
    __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
    deferred->Branch(not_equal);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ cmp(key.reg(),
           FieldOperand(receiver.reg(), JSArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Get the elements array from the receiver and check that it is not a
    // dictionary.
    __ mov(tmp.reg(),
           FieldOperand(receiver.reg(), JSArray::kElementsOffset));

    // Check whether it is possible to omit the write barrier. If the
    // elements array is in new space or the value written is a smi, we can
    // safely update the elements array without a write barrier.
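    // (New-space objects are visited in full by the scavenger anyway, and
    // a smi is not a heap pointer, so neither case can create an
    // old-to-new pointer that the write barrier would have to record.)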
    Label in_new_space;
    __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
    if (!value_is_constant) {
      __ test(result.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    }

    __ bind(&in_new_space);
    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison. When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    deferred->Branch(not_equal);

    // Store the value.
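    // FixedArrayElementOperand scales the still-tagged smi key by half a
    // pointer size, so the operand addresses the element at
    // untagged_key * kPointerSize past the fixed array header.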
    __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
    __ IncrementCounter(&Counters::keyed_store_inline, 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC();
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 3);
  return result;
}


#undef __
#define __ ACCESS_MASM(masm)


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>::cast(raw_name->handle());
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) cgen_->frame()->Dup();
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
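      // The two PushElementAt(1) calls above duplicate the receiver and
      // the key, so a (receiver, key) pair survives the load when the
      // reference is needed again afterwards.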
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::TakeValue() {
  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot. For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can
  // reach here. Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in esi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ mov(edx, Operand(esp, 1 * kPointerSize));

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ mov(ebx, Immediate(Factory::empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(Factory::the_hole_value()));
  __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
  __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
  __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ mov(FieldOperand(eax, JSFunction::kCodeEntryOffset), edx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
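  // The runtime call below takes two arguments, the context and the
  // function info; the pops and pushes rebuild the stack so that both sit
  // below the return address, with esi supplying the context.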
  __ pop(ecx);  // Temporarily remove return address.
  __ pop(edx);
  __ push(esi);
  __ push(edx);
  __ push(ecx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Set up the fixed slots.
  __ xor_(ebx, Operand(ebx));  // Set to NULL.
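  // (xor-ing a register with itself is the shortest way to zero it on
  // ia32; the zero then doubles as the NULL stored into the two slots
  // below.)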
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the surrounding context. We go through the
  // context in the function (ecx) to match the allocation behavior we have
  // in the runtime system (see Heap::AllocateFunctionContext).
  __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
  __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, Factory::undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  STATIC_ASSERT(kPointerSize == 4);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(ecx, CodeGenerator::FixedArrayElementOperand(ecx, eax));
  __ cmp(ecx, Factory::undefined_value());
  __ j(equal, &slow_case);
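  // The boilerplate is found in the literals array at the (smi) literal
  // index; undefined there means it has not been created yet and only the
  // runtime can create it.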

  if (FLAG_debug_code) {
    const char* message;
    Handle<Map> expected_map;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map = Factory::fixed_array_map();
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map = Factory::fixed_cow_array_map();
    }
    __ push(ecx);
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
    __ Assert(equal, message);
    __ pop(ecx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


// NOTE: The stub does not handle the inlined cases (Smis, Booleans,
// undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label false_result, true_result, not_string;
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // 'null' => false.
  __ cmp(eax, Factory::null_value());
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &not_string);
  STATIC_ASSERT(kSmiTag == 0);
  __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  // HeapNumber => false iff +0, -0, or NaN.
  __ cmp(edx, Factory::heap_number_map());
  __ j(not_equal, &true_result);
  __ fldz();
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ FCmp();
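  // FCmp pops both values off the FPU stack and transfers the comparison
  // result to the integer flags. Equality with 0.0 (covering +0 and -0)
  // sets the zero flag, and an unordered result for NaN sets it as well,
  // so the single j(zero) below handles all three false cases.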
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in eax.
  __ bind(&true_result);
  __ mov(eax, 1);
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ mov(eax, 0);
  __ ret(1 * kPointerSize);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
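    // The cases below shuffle the operands into (edx, eax), preferring to
    // record a reversal with SetArgsReversed() for commutative operations
    // rather than emitting extra moves or an xchg.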
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ mov(right_arg, right);
      } else if (right.is(right_arg)) {
        __ mov(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ mov(left_arg, right);
          SetArgsReversed();
        } else {
          // The order of the moves is important to avoid destroying the
          // left argument.
          __ mov(left_arg, left);
          __ mov(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ mov(right_arg, left);
          SetArgsReversed();
        } else {
          // The order of the moves is important to avoid destroying the
          // right argument.
          __ mov(right_arg, right);
          __ mov(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ mov(left_arg, left);
        __ mov(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(Immediate(right));
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (left.is(left_arg)) {
      __ mov(right_arg, Immediate(right));
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ mov(left_arg, Immediate(right));
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ mov(left_arg, left);
      __ mov(right_arg, Immediate(right));
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(Immediate(left));
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (right.is(right_arg)) {
      __ mov(left_arg, Immediate(left));
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ mov(right_arg, Immediate(left));
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ mov(right_arg, right);
      __ mov(left_arg, Immediate(left));
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm,
                                         VirtualFrame* frame,
                                         Result* left,
                                         Result* right) {
  if (ArgsInRegistersSupported()) {
    SetArgsInRegisters();
    return frame->CallStub(this, left, right);
  } else {
    frame->Push(left);
    frame->Push(right);
    return frame->CallStub(this, 2);
  }
}


void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division. Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    if (HasArgsInRegisters()) {
      __ mov(ebx, eax);
      __ mov(eax, edx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ mov(right, Operand(esp, 1 * kPointerSize));
    __ mov(left, Operand(esp, 2 * kPointerSize));
  }

  if (static_operands_type_.IsSmi()) {
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      __ or_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_AND) {
      __ and_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_XOR) {
      __ xor_(right, Operand(left));
      GenerateReturn(masm);
      return;
    }
  }

  // 2. Prepare the smi check of both operands by or-ing them together.
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result. Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
  STATIC_ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
  __ test(combined, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smis, not_taken);
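  // With kSmiTag == 0 the tag is the low bit, so the or of both operands
  // has that bit set exactly when at least one operand is a heap object;
  // a single test therefore smi-checks both operands at once.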

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis, not_taken);
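      // Subtracting 0xc0000000 sets the sign flag exactly for values in
      // [0x40000000, 0xbfffffff], i.e. for results outside the 31-bit smi
      // range, so the single cmp/j(sign) pair above detects the overflow.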
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to negative when smi
      //   tagging. These two cases can only happen with shifts by 0 or 1
      //   when handed a valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, slow, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis, not_taken);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
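      // A zero product with a negative operand stands for -0.0, which has
      // no smi encoding; the sign information is recovered from
      // combined (== left | right).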
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi so
      // save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis, not_taken);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by the
      // idiv instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis, not_taken);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax.
  GenerateReturn(masm);

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::SHL: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Result we want is in left == edx, so we can put the allocated heap
      // number in eax.
      __ AllocateHeapNumber(eax, ecx, ebx, slow);
      // Store the result in the HeapNumber and return.
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        __ cvtsi2sd(xmm0, Operand(left));
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
      } else {
        // It's OK to overwrite the right argument on the stack because we
        // are about to return.
        __ mov(Operand(esp, 1 * kPointerSize), left);
        __ fild_s(Operand(esp, 1 * kPointerSize));
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
      }
      GenerateReturn(masm);
      break;
    }

    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Restore arguments to edx, eax.
      switch (op_) {
        case Token::ADD:
          // Revert right = right + left.
          __ sub(right, Operand(left));
          break;
        case Token::SUB:
          // Revert left = left - right.
          __ add(left, Operand(right));
          break;
        case Token::MUL:
          // Right was clobbered but a copy is in ebx.
          __ mov(right, ebx);
          break;
        case Token::DIV:
          // Left was clobbered but a copy is in edi. Right is in ebx for
          // division.
          __ mov(edx, edi);
          __ mov(eax, right);
          break;
        default: UNREACHABLE();
          break;
      }
      __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Smis(masm, ebx);
        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::LoadFloatSmis(masm, ebx);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
      }
      __ mov(eax, ecx);
      GenerateReturn(masm);
      break;
    }

    default:
      break;
  }

  // 7. Non-smi operands, fall out to the non-smi code with the operands in
  // edx and eax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  switch (op_) {
    case Token::BIT_OR:
    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Right operand is saved in ecx and eax was destroyed by the smi
      // check.
      __ mov(eax, ecx);
      break;

    case Token::DIV:
    case Token::MOD:
      // Operands are in eax, ebx at this point.
      __ mov(edx, eax);
      __ mov(eax, ebx);
      break;

    default:
      break;
  }
}


void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);

  // Generate fast case smi code if requested. This flag is set when the fast
  // case smi code is not generated by the caller. Generating it here will
  // speed up common operations.
  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {  // MOD goes straight to runtime.
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }

  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument
          // occurs (and only if smi code is generated). This is the right
          // moment to patch to HEAP_NUMBERS state. The transition is
          // attempted only for the four basic operations. The stub stays in
          // the DEFAULT state forever for all other operations (also if smi
          // code is skipped).
          GenerateTypeTransition(masm);
          break;
        }
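        // GenerateTypeTransition (defined below) reports the stub's key and
        // the observed type info to the BinaryOpIC runtime, which compiles
        // a specialized stub and patches the call site to use it.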

        Label not_floats;
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
            if (static_operands_type_.IsSmi()) {
              if (FLAG_debug_code) {
                __ AbortIfNotSmi(edx);
                __ AbortIfNotSmi(eax);
              }
              FloatingPointHelper::LoadSSE2Smis(masm, ecx);
            } else {
              FloatingPointHelper::LoadSSE2Operands(masm);
            }
          } else {
            FloatingPointHelper::LoadSSE2Operands(masm, &call_runtime);
          }

          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          GenerateHeapResultAllocation(masm, &call_runtime);
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        } else {  // SSE2 not available, use FPU.
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
          } else {
            FloatingPointHelper::CheckFloatOperands(masm, &call_runtime, ebx);
          }
          FloatingPointHelper::LoadFloatOperands(
              masm,
              ecx,
              FloatingPointHelper::ARGS_IN_REGISTERS);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          Label after_alloc_failure;
          GenerateHeapResultAllocation(masm, &after_alloc_failure);
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          GenerateReturn(masm);
          __ bind(&after_alloc_failure);
          __ ffree();
          __ jmp(&call_runtime);
        }
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // Try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label non_smi_result;
        FloatingPointHelper::LoadAsIntegers(masm,
                                            static_operands_type_,
                                            use_sse3_,
                                            &call_runtime);
        switch (op_) {
          case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
          case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
          case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
          case Token::SAR: __ sar_cl(eax); break;
          case Token::SHL: __ shl_cl(eax); break;
          case Token::SHR: __ shr_cl(eax); break;
          default: UNREACHABLE();
        }
        if (op_ == Token::SHR) {
          // Check if result is non-negative and fits in a smi.
          __ test(eax, Immediate(0xc0000000));
          __ j(not_zero, &call_runtime);
        } else {
          // Check if result fits in a smi.
          __ cmp(eax, 0xc0000000);
          __ j(negative, &non_smi_result);
        }
        // Tag smi result and return.
        __ SmiTag(eax);
        GenerateReturn(masm);

        // All ops except SHR return a signed int32 that we load in
        // a HeapNumber.
        if (op_ != Token::SHR) {
          __ bind(&non_smi_result);
          // Allocate a heap number if needed.
          __ mov(ebx, Operand(eax));  // ebx: result
          Label skip_allocation;
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                  1 * kPointerSize : 2 * kPointerSize));
              __ test(eax, Immediate(kSmiTagMask));
              __ j(not_zero, &skip_allocation, not_taken);
              // Fall through!
            case NO_OVERWRITE:
              __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(ebx));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), ebx);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
          GenerateReturn(masm);
        }
        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If arguments were passed in registers, place them on the
  // stack in the correct order below the return address.
  __ bind(&call_runtime);
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Test for string arguments before calling runtime.
      Label not_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in edx and eax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      // Registers containing left and right operands respectively.
      Register lhs, rhs;
      if (HasArgsReversed()) {
        lhs = eax;
        rhs = edx;
      } else {
        lhs = edx;
        rhs = eax;
      }

      // Test if first argument is a string.
      __ test(lhs, Immediate(kSmiTagMask));
      __ j(zero, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_string1);
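      // All string instance types are below FIRST_NONSTRING_TYPE, so
      // above_equal after CmpObjectType means "not a string".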

      // First argument is a string, test second.
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &string1);

      // First and second argument are strings. Jump to the string add stub.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to look up the
      // number string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, edi, ebx, ecx, true, &string1);

      // Replace second argument on stack and tailcall string add stub to make
      // the result.
      __ mov(Operand(esp, 1 * kPointerSize), edi);
      __ TailCallStub(&string_add_stub);

      // Only first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test second.
      __ bind(&not_string1);
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_strings);

      // Only second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                       Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
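  // The overwrite hints refer to the original left and right stack
  // arguments, so if the arguments were reversed on entry the hints must
  // be swapped to keep naming the same values.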
  if (HasArgsReversed()) {
    if (mode == OVERWRITE_RIGHT) {
      mode = OVERWRITE_LEFT;
    } else if (mode == OVERWRITE_LEFT) {
      mode = OVERWRITE_RIGHT;
    }
  }
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use the object in edx as the result holder.
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  // If arguments are not passed in registers, read them from the stack.
  ASSERT(!HasArgsInRegisters());
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers, remove them from the stack
  // before returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(ecx);  // Pop the return address out of the way.
  // Push the operands in (left, right) order, undoing a possible argument
  // reversal, so the stack matches what GenerateLoadArguments expects.
  if (HasArgsReversed()) {
    __ push(eax);
    __ push(edx);
  } else {
    __ push(edx);
    __ push(eax);
  }
  __ push(ecx);  // Put the return address back on top.
}
10965
10966
10967void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
Leon Clarkeac952652010-07-15 11:15:24 +010010968 // Ensure the operands are on the stack.
Steve Block6ded16b2010-05-10 14:33:55 +010010969 if (HasArgsInRegisters()) {
10970 GenerateRegisterArgsPush(masm);
Steve Block6ded16b2010-05-10 14:33:55 +010010971 }
10972
Leon Clarkeac952652010-07-15 11:15:24 +010010973 __ pop(ecx); // Save return address.
Steve Block6ded16b2010-05-10 14:33:55 +010010974
Steve Block6ded16b2010-05-10 14:33:55 +010010975 // Left and right arguments are now on top.
Steve Block6ded16b2010-05-10 14:33:55 +010010976 // Push this stub's key. Although the operation and the type info are
10977 // encoded into the key, the encoding is opaque, so push them too.
10978 __ push(Immediate(Smi::FromInt(MinorKey())));
10979 __ push(Immediate(Smi::FromInt(op_)));
10980 __ push(Immediate(Smi::FromInt(runtime_operands_type_)));
10981
Leon Clarkeac952652010-07-15 11:15:24 +010010982 __ push(ecx); // Push return address.
Steve Block6ded16b2010-05-10 14:33:55 +010010983
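  // At this point the runtime helper sees five tagged arguments: the two
  // operands plus the three smis pushed above (stub key, operation and
  // runtime operand type), matching the argument count passed to
  // TailCallExternalReference below.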
  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      5,
      1);
}


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // Input on stack:
  // esp[4]: argument (should be number).
  // esp[0]: return address.
  // Test that eax is a number.
  Label runtime_call;
  Label runtime_call_clear_stack;
  Label input_not_smi;
  Label loaded;
  __ mov(eax, Operand(esp, kPointerSize));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &input_not_smi);
  // Input is a smi. Untag and load it onto the FPU stack.
  // Then load the low and high words of the double into ebx, edx.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ sar(eax, 1);
  __ sub(Operand(esp), Immediate(2 * kPointerSize));
  __ mov(Operand(esp, 0), eax);
  __ fild_s(Operand(esp, 0));
  __ fst_d(Operand(esp, 0));
  __ pop(edx);
  __ pop(ebx);
  __ jmp(&loaded);
  __ bind(&input_not_smi);
  // Check if input is a HeapNumber.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
  __ j(not_equal, &runtime_call);
  // Input is a HeapNumber. Push it on the FPU stack and load its
  // low and high words into ebx, edx.
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
  __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));

  __ bind(&loaded);
  // ST[0] == double value
  // ebx = low 32 bits of double value
  // edx = high 32 bits of double value
  // Compute hash (the shifts are arithmetic):
  //   h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
  __ mov(ecx, ebx);
  __ xor_(ecx, Operand(edx));
  __ mov(eax, ecx);
  __ sar(eax, 16);
  __ xor_(ecx, Operand(eax));
  __ mov(eax, ecx);
  __ sar(eax, 8);
  __ xor_(ecx, Operand(eax));
  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
  __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
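  // Worked example (illustrative): for the double 1.5 the words are
  // low = 0x00000000 and high = 0x3FF80000, so
  //   h = low ^ high  = 0x3FF80000
  //   h ^= h >> 16   -> 0x3FF83FF8
  //   h ^= h >> 8    -> 0x3FC7C7C7
  // and, assuming a cache size of 512, the resulting index would be
  // 0x3FC7C7C7 & 511 = 455.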

  // ST[0] == double value.
  // ebx = low 32 bits of double value.
  // edx = high 32 bits of double value.
  // ecx = TranscendentalCache::hash(double value).
  __ mov(eax,
         Immediate(ExternalReference::transcendental_cache_array_address()));
  // Eax points to the cache array.
  __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
  // Eax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ test(eax, Operand(eax));
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  { TranscendentalCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);  // Two uint_32's and a pointer.
    CHECK_EQ(0, elem_in0 - elem_start);
    CHECK_EQ(kIntSize, elem_in1 - elem_start);
    CHECK_EQ(2 * kIntSize, elem_out - elem_start);
  }
#endif
  // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
  __ lea(ecx, Operand(ecx, ecx, times_2, 0));
  __ lea(ecx, Operand(eax, ecx, times_4, 0));
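  // The two lea instructions form the multiply-by-12 without a mul: the
  // first computes ecx = ecx + 2*ecx (= ecx*3), the second eax + 4*(ecx*3)
  // (= eax + ecx*12), matching the 12-byte element size checked above.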
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmp(ebx, Operand(ecx, 0));
  __ j(not_equal, &cache_miss);
  __ cmp(edx, Operand(ecx, kIntSize));
  __ j(not_equal, &cache_miss);
  // Cache hit!
  __ mov(eax, Operand(ecx, 2 * kIntSize));
  __ fstp(0);
  __ ret(kPointerSize);

  __ bind(&cache_miss);
  // Update cache with new value.
  // We are short on registers, so use no_reg as scratch.
  // This gives slightly larger code.
  __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  GenerateOperation(masm);
  __ mov(Operand(ecx, 0), ebx);
  __ mov(Operand(ecx, kIntSize), edx);
  __ mov(Operand(ecx, 2 * kIntSize), eax);
  __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ ret(kPointerSize);

  __ bind(&runtime_call_clear_stack);
  __ fstp(0);
  __ bind(&runtime_call);
  __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
  // Only free register is edi.
  Label done;
  ASSERT(type_ == TranscendentalCache::SIN ||
         type_ == TranscendentalCache::COS);
  // More transcendental types can be added later.

  // Both fsin and fcos require arguments in the range +/-2^63 and
  // return NaN for infinities and NaN. They can share all code except
  // the actual fsin/fcos operation.
  Label in_range;
  // If the argument is outside the range -2^63..2^63, fsin/fcos doesn't
  // work. We must reduce it to the appropriate range.
  __ mov(edi, edx);
  __ and_(Operand(edi), Immediate(0x7ff00000));  // Exponent only.
  int supported_exponent_limit =
      (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
  __ cmp(Operand(edi), Immediate(supported_exponent_limit));
  __ j(below, &in_range, taken);
  // Check for infinity and NaN. Both return NaN for sin.
  __ cmp(Operand(edi), Immediate(0x7ff00000));
  Label non_nan_result;
  __ j(not_equal, &non_nan_result, taken);
  // Input is +/-Infinity or NaN. Result is NaN.
  __ fstp(0);
  // NaN is represented by 0x7ff8000000000000.
  __ push(Immediate(0x7ff80000));
  __ push(Immediate(0));
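  // The high word is pushed first, so in memory [esp] now holds the low
  // word and [esp+4] the high word: the little-endian image of the quiet
  // NaN 0x7ff8000000000000 that fld_d reads below.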
  __ fld_d(Operand(esp, 0));
  __ add(Operand(esp), Immediate(2 * kPointerSize));
  __ jmp(&done);

  __ bind(&non_nan_result);

  // Use fpmod to restrict argument to the range +/-2*PI.
  __ mov(edi, eax);  // Save eax before using fnstsw_ax.
  __ fldpi();
  __ fadd(0);
  __ fld(1);
  // FPU Stack: input, 2*pi, input.
  {
    Label no_exceptions;
    __ fwait();
    __ fnstsw_ax();
    // Clear if Illegal Operand or Zero Division exceptions are set.
    __ test(Operand(eax), Immediate(5));
    __ j(zero, &no_exceptions);
    __ fnclex();
    __ bind(&no_exceptions);
  }

  // Compute st(0) % st(1).
  {
    Label partial_remainder_loop;
    __ bind(&partial_remainder_loop);
    __ fprem1();
    __ fwait();
    __ fnstsw_ax();
    __ test(Operand(eax), Immediate(0x400 /* C2 */));
    // If C2 is set, computation only has partial result. Loop to
    // continue computation.
    __ j(not_zero, &partial_remainder_loop);
  }
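  // fprem1 computes the IEEE partial remainder of st(0) by st(1). For
  // large arguments it only reduces the exponent difference by up to 63
  // bits per iteration and reports "incomplete" through the C2 status
  // flag, which is why the loop above repeats until C2 is clear.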
  // FPU Stack: input, 2*pi, input % 2*pi
  __ fstp(2);
  __ fstp(0);
  __ mov(eax, edi);  // Restore eax (allocated HeapNumber pointer).

  // FPU Stack: input % 2*pi
  __ bind(&in_range);
  switch (type_) {
    case TranscendentalCache::SIN:
      __ fsin();
      break;
    case TranscendentalCache::COS:
      __ fcos();
      break;
    default:
      UNREACHABLE();
  }
  __ bind(&done);
}


// Get the integer part of a heap number. Surprisingly, all this bit twiddling
// is faster than using the built-in instructions on floating point registers.
// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
// trashed registers.
void IntegerConvert(MacroAssembler* masm,
                    Register source,
                    TypeInfo type_info,
                    bool use_sse3,
                    Label* conversion_failure) {
  ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
  Label done, right_exponent, normal_exponent;
  Register scratch = ebx;
  Register scratch2 = edi;
  if (type_info.IsInteger32() && CpuFeatures::IsEnabled(SSE2)) {
    CpuFeatures::Scope scope(SSE2);
    __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
    return;
  }
  if (!type_info.IsInteger32() || !use_sse3) {
    // Get the exponent word.
    __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
    // Get the exponent alone in scratch2.
    __ mov(scratch2, scratch);
    __ and_(scratch2, HeapNumber::kExponentMask);
  }
  if (use_sse3) {
    CpuFeatures::Scope scope(SSE3);
    if (!type_info.IsInteger32()) {
      // Check whether the exponent is too big for a 64 bit signed integer.
      static const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
      __ j(greater_equal, conversion_failure);
    }
    // Load x87 register with heap number.
    __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
    // Reserve space for 64 bit answer.
    __ sub(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
    // Do the conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(ecx, Operand(esp, 0));  // Load low word of answer into ecx.
    __ add(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
  } else {
    // Load ecx with zero. We use this either for the final shift or
    // for the answer.
    __ xor_(ecx, Operand(ecx));
    // Check whether the exponent matches a 32 bit signed int that cannot be
    // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
    // exponent is 30 (biased). This is the exponent that we are fastest at
    // and also the highest exponent we can handle here.
    const uint32_t non_smi_exponent =
        (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
    // If we have a match of the int32-but-not-Smi exponent then skip some
    // logic.
    __ j(equal, &right_exponent);
    // If the exponent is higher than that then go to the slow case. This
    // catches numbers that don't fit in a signed int32, infinities and NaNs.
    __ j(less, &normal_exponent);

    {
      // Handle a big exponent. The only reason we have this code is that the
      // >>> operator has a tendency to generate numbers with an exponent of
      // 31.
      const uint32_t big_non_smi_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
      __ j(not_equal, conversion_failure);
      // We have the big exponent, typically from >>>. This means the number
      // is in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
      __ mov(scratch2, scratch);
      __ and_(scratch2, HeapNumber::kMantissaMask);
      // Put back the implicit 1.
      __ or_(scratch2, 1 << HeapNumber::kExponentShift);
      // Shift up the mantissa bits to take up the space the exponent used to
      // take. We just or'ed in the implicit bit so that took care of one and
      // we want to use the full unsigned range so we subtract 1 bit from the
      // shift distance.
      const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
      __ shl(scratch2, big_shift_distance);
      // Get the second half of the double.
      __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
      // Shift down 21 bits to get the most significant 11 bits of the low
      // mantissa word.
      __ shr(ecx, 32 - big_shift_distance);
      __ or_(ecx, Operand(scratch2));
      // We have the answer in ecx, but we may need to negate it.
      __ test(scratch, Operand(scratch));
      __ j(positive, &done);
      __ neg(ecx);
      __ jmp(&done);
    }
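    // Worked example (illustrative): for the double 2^31 the high word is
    // 0x41E00000 (biased exponent 1023 + 31 = 1054, mantissa zero).
    // Putting back the implicit 1 gives 0x00100000, and shifting up by
    // big_shift_distance (11) yields 0x80000000 -- exactly the int32 bit
    // pattern the bitwise operators expect for 2^31.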

    __ bind(&normal_exponent);
    // Exponent word in scratch, exponent part of exponent word in scratch2.
    // Zero in ecx.
    // We know the exponent is smaller than 30 (biased). If it is less than
    // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0,
    // i.e. it rounds to zero.
    const uint32_t zero_exponent =
        (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
    __ sub(Operand(scratch2), Immediate(zero_exponent));
    // ecx already has a Smi zero.
    __ j(less, &done);

    // We have a shifted exponent between 0 and 30 in scratch2.
    __ shr(scratch2, HeapNumber::kExponentShift);
    __ mov(ecx, Immediate(30));
    __ sub(ecx, Operand(scratch2));

    __ bind(&right_exponent);
    // Here ecx is the shift, scratch is the exponent word.
    // Get the top bits of the mantissa.
    __ and_(scratch, HeapNumber::kMantissaMask);
    // Put back the implicit 1.
    __ or_(scratch, 1 << HeapNumber::kExponentShift);
    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We have kExponentShift + 1 significant bits in the low end of the
    // word. Shift them to the top bits.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    __ shl(scratch, shift_distance);
    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
    // Shift down 22 bits to get the most significant 10 bits of the low
    // mantissa word.
    __ shr(scratch2, 32 - shift_distance);
    __ or_(scratch2, Operand(scratch));
    // Move down according to the exponent.
    __ shr_cl(scratch2);
    // Now the unsigned answer is in scratch2. We need to move it to ecx and
    // we may need to fix the sign.
    Label negative;
    __ xor_(ecx, Operand(ecx));
    __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
    __ j(greater, &negative);
    __ mov(ecx, scratch2);
    __ jmp(&done);
    __ bind(&negative);
    __ sub(ecx, Operand(scratch2));
    __ bind(&done);
  }
}


// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are the left and right integers for a bit op.
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm,
                                                TypeInfo type_info,
                                                bool use_sse3,
                                                Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  if (!type_info.IsDouble()) {
    if (!type_info.IsSmi()) {
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &arg1_is_object);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(edx);
    }
    __ SmiUntag(edx);
    __ jmp(&load_arg2);
  }

  __ bind(&arg1_is_object);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm, edx, type_info, use_sse3, conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);
  if (!type_info.IsDouble()) {
    // Test if arg2 is a Smi.
    if (!type_info.IsSmi()) {
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &arg2_is_object);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(eax);
    }
    __ SmiUntag(eax);
    __ mov(ecx, eax);
    __ jmp(&done);
  }

  __ bind(&arg2_is_object);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm, eax, type_info, use_sse3, conversion_failure);
  __ bind(&done);
  __ mov(eax, edx);
}


// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are the left and right integers for a bit op.
void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
                                                 bool use_sse3,
                                                 Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  // Test if arg1 is a Smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &arg1_is_object);

  __ SmiUntag(edx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  __ cmp(edx, Factory::undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(edx, Immediate(0));
  __ jmp(&load_arg2);

  __ bind(&arg1_is_object);
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ebx, Factory::heap_number_map());
  __ j(not_equal, &check_undefined_arg1);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm,
                 edx,
                 TypeInfo::Unknown(),
                 use_sse3,
                 conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);

  // Test if arg2 is a Smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &arg2_is_object);

  __ SmiUntag(eax);
  __ mov(ecx, eax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ cmp(eax, Factory::undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(ecx, Immediate(0));
  __ jmp(&done);

  __ bind(&arg2_is_object);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(ebx, Factory::heap_number_map());
  __ j(not_equal, &check_undefined_arg2);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm,
                 eax,
                 TypeInfo::Unknown(),
                 use_sse3,
                 conversion_failure);
  __ bind(&done);
  __ mov(eax, edx);
}


void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         TypeInfo type_info,
                                         bool use_sse3,
                                         Label* conversion_failure) {
  if (type_info.IsNumber()) {
    LoadNumbersAsIntegers(masm, type_info, use_sse3, conversion_failure);
  } else {
    LoadUnknownsAsIntegers(masm, use_sse3, conversion_failure);
  }
}
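// The dispatch above is a type-feedback shortcut: when the recorded
// TypeInfo already proves both operands are numbers, the undefined-value
// checks performed by LoadUnknownsAsIntegers can be skipped entirely.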


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ test(number, Immediate(kSmiTagMask));
  __ j(zero, &load_smi, not_taken);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
  Label load_smi_edx, load_eax, load_smi_eax, done;
  // Load operand in edx into xmm0.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_edx, not_taken);  // Argument in edx is a smi.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  __ bind(&load_eax);
  // Load operand in eax into xmm1.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_eax, not_taken);  // Argument in eax is a smi.
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);

  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_edx, not_taken);  // Argument in edx is a smi.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_eax, not_taken);  // Argument in eax is a smi.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
  __ j(equal, &load_float_eax);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done);
  __ bind(&load_float_eax);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
                                       Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm0, Operand(scratch));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm1, Operand(scratch));
}


void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
                                            Register scratch,
                                            ArgLocation arg_location) {
  Label load_smi_1, load_smi_2, done_load_1, done;
  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, edx);
  } else {
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_1, not_taken);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ bind(&done_load_1);

  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, eax);
  } else {
    __ mov(scratch, Operand(esp, 1 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_2, not_taken);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_1);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
  __ jmp(&done_load_1);

  __ bind(&load_smi_2);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);

  __ bind(&done);
}


void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
                                        Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ mov(Operand(esp, 0), scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smis -> scratch = k_is_float;
  // otherwise scratch = k_not_float.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &test_other, not_taken);  // Argument in edx is OK.
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(scratch, Factory::heap_number_map());
  __ j(not_equal, non_float);  // Argument in edx is not a number -> NaN.

  __ bind(&test_other);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &done);  // Argument in eax is OK.
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, Factory::heap_number_map());
  __ j(not_equal, non_float);  // Argument in eax is not a number -> NaN.

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  Label slow, done;

  if (op_ == Token::SUB) {
    // Check whether the value is a smi.
    Label try_float;
    __ test(eax, Immediate(kSmiTagMask));
    __ j(not_zero, &try_float, not_taken);

    if (negative_zero_ == kStrictNegativeZero) {
      // Go to the slow case if the value of the expression is zero
      // to make sure that we switch between 0 and -0.
      __ test(eax, Operand(eax));
      __ j(zero, &slow, not_taken);
    }

    // The value of the expression is a smi that is not zero. Try
    // optimistic subtraction '0 - value'.
    Label undo;
    __ mov(edx, Operand(eax));
    __ Set(eax, Immediate(0));
    __ sub(eax, Operand(edx));
    __ j(no_overflow, &done, taken);
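    // The subtraction overflows in exactly one case: negating the most
    // negative smi, whose absolute value is not representable as a smi.
    // That case falls through to the undo path below and ends up in the
    // generic builtin.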

    // Restore eax and go to the slow case.
    __ bind(&undo);
    __ mov(eax, Operand(edx));
    __ jmp(&slow);

    // Try floating point case.
    __ bind(&try_float);
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ cmp(edx, Factory::heap_number_map());
    __ j(not_equal, &slow);
    if (overwrite_ == UNARY_OVERWRITE) {
      __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
      __ xor_(edx, HeapNumber::kSignMask);  // Flip sign.
      __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
    } else {
      __ mov(edx, Operand(eax));
      // edx: operand
      __ AllocateHeapNumber(eax, ebx, ecx, &undo);
      // eax: allocated 'empty' number
      __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
      __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
      __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
      __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
      __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
    }
  } else if (op_ == Token::BIT_NOT) {
    // Check if the operand is a heap number.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ cmp(edx, Factory::heap_number_map());
    __ j(not_equal, &slow, not_taken);

    // Convert the heap number in eax to an untagged integer in ecx.
    IntegerConvert(masm,
                   eax,
                   TypeInfo::Unknown(),
                   CpuFeatures::IsSupported(SSE3),
                   &slow);

    // Do the bitwise operation and check if the result fits in a smi.
    Label try_float;
    __ not_(ecx);
    __ cmp(ecx, 0xc0000000);
    __ j(sign, &try_float, not_taken);
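    // The smi range on ia32 is [-2^30, 2^30). The cmp sets the sign flag
    // of ecx - 0xc0000000 (= ecx + 2^30 mod 2^32), which is negative
    // exactly for values outside that range, so they take the heap number
    // path.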

    // Tag the result as a smi and we're done.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ lea(eax, Operand(ecx, times_2, kSmiTag));
    __ jmp(&done);

    // Try to store the result in a heap number.
    __ bind(&try_float);
    if (overwrite_ == UNARY_NO_OVERWRITE) {
      // Allocate a fresh heap number, but don't overwrite eax until
      // we're sure we can do it without going through the slow case
      // that needs the value in eax.
      __ AllocateHeapNumber(ebx, edx, edi, &slow);
      __ mov(eax, Operand(ebx));
    }
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope use_sse2(SSE2);
      __ cvtsi2sd(xmm0, Operand(ecx));
      __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
    } else {
      __ push(ecx);
      __ fild_s(Operand(esp, 0));
      __ pop(ecx);
      __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
    }
  } else {
    UNIMPLEMENTED();
  }

  // Return from the stub.
  __ bind(&done);
  __ StubReturn(1);

  // Handle the slow case by jumping to the JavaScript builtin.
  __ bind(&slow);
  __ pop(ecx);  // Pop return address.
  __ push(eax);
  __ push(ecx);  // Push return address.
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &slow, not_taken);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor);
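  // An arguments adaptor frame is inserted between caller and callee when
  // the actual argument count differs from the function's formal parameter
  // count; in that case the arguments must be read relative to the adaptor
  // frame, which records the actual count.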

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, Operand(eax));
  __ j(above_equal, &slow, not_taken);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);
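  // Both eax (parameter count) and edx (key) are smis, i.e. already
  // doubled, so the times_2 scale factors above turn them into byte
  // offsets of count * kPointerSize without explicit untagging.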

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, Operand(ecx));
  __ j(above_equal, &slow, not_taken);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, Operand(ecx));
  __ j(zero, &add_arguments_object);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));
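  // ecx now holds the total allocation size: the elements FixedArray
  // (header plus length * kPointerSize, skipped when there are no
  // arguments) plus the fixed-size arguments object itself.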

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
  int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Setup the callee in-object property.
  STATIC_ASSERT(Heap::arguments_callee_index == 0);
  __ mov(ebx, Operand(esp, 3 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::arguments_length_index == 1);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, Operand(ecx));
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(Operand(edi), Immediate(kPointerSize));
  __ sub(Operand(edx), Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime system if native RegExp is not selected
  // at compile time, or if the regexp entry in generated code has been
  // turned off by the runtime flag.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime, invoke_regexp;

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address();
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size();
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, Operand(ebx));
  __ j(zero, &runtime, not_taken);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate the number of capture registers (number_of_captures + 1) * 2.
  // This uses the assumption that smis are 2 * their untagged value.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(Operand(edx), Immediate(2));  // edx was a smi.
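  // For example, a regexp with 3 capture groups arrives here as the smi 3
  // (bit pattern 6); adding 2 yields 8, i.e. the (3 + 1) * 2 register
  // slots needed for the start/end offsets of the whole match and of each
  // capture.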
  // Check that the static offsets vector buffer is large enough.
  __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
  __ j(above, &runtime);

  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the second argument is a string.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);
  // Get the length of the string to ebx.
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));

  // ebx: Length of subject string as a smi
  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the third argument is a positive smi less than the subject
  // string length. A negative value will be greater (unsigned comparison).
  __ mov(eax, Operand(esp, kPreviousIndexOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ cmp(eax, Operand(ebx));
  __ j(above_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, Factory::fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and
  // the additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, Operand(eax));
  __ j(greater, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check the representation and encoding of the subject string.
  Label seq_ascii_string, seq_two_byte_string, check_code;
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  // First check for a flat two byte string.
  __ and_(ebx,
          kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);
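  // Because the string, sequential and two-byte tags are all zero, a flat
  // two byte string leaves no bits set under the mask above, so a single
  // zero test classifies it.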
  // Any other flat string must be a flat ascii string.
  __ test(Operand(ebx),
          Immediate(kIsNotStringMask | kStringRepresentationMask));
  __ j(zero, &seq_ascii_string);

  // Check for a flat cons string.
  // A flat cons string is a cons string where the second part is the empty
  // string. In that case the subject string is just the first part of the
  // cons string. Also in this case the first part of the cons string is
  // known to be a sequential string or an external string.
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
  __ test(Operand(ebx),
          Immediate(kIsNotStringMask | kExternalStringTag));
  __ j(not_zero, &runtime);
  // String is a cons string.
  __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
  __ cmp(Operand(edx), Factory::empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // String is a cons string with an empty second part.
  // eax: first part of cons string.
  // ebx: map of first part of cons string.
  // Is the first part a flat two byte string?
  __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
            kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);
  // Any other flat string must be ascii.
  __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
            kStringRepresentationMask);
  __ j(not_zero, &runtime);

  __ bind(&seq_ascii_string);
  // eax: subject string (flat ascii)
  // ecx: RegExp data (FixedArray)
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
  __ Set(edi, Immediate(1));  // Type is ascii.
  __ jmp(&check_code);

  __ bind(&seq_two_byte_string);
  // eax: subject string (flat two byte)
  // ecx: RegExp data (FixedArray)
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Set(edi, Immediate(0));  // Type is two byte.

  __ bind(&check_code);
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains the hole.
  __ CmpObjectType(edx, CODE_TYPE, ebx);
  __ j(not_equal, &runtime);

  // eax: subject string
  // edx: code
  // edi: encoding of subject string (1 if ascii, 0 if two_byte)
  // Load used arguments before starting to push arguments for the call to
  // native RegExp code, to avoid handling a changing stack height.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ SmiUntag(ebx);  // Previous index from smi.

  // eax: subject string
  // ebx: previous index
  // edx: code
  // edi: encoding of subject string (1 if ascii, 0 if two_byte)
  // All checks done. Now push arguments for the native regexp code.
  __ IncrementCounter(&Counters::regexp_entry_native, 1);

  static const int kRegExpExecuteArguments = 7;
  __ PrepareCallCFunction(kRegExpExecuteArguments, ecx);

  // Argument 7: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));

  // Argument 6: Start (high end) of backtracking stack memory area.
  __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 5 * kPointerSize), ecx);

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector()));

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest;
  __ test(edi, Operand(edi));
  __ mov(edi, FieldOperand(eax, String::kLengthOffset));
  __ j(zero, &setup_two_byte);
  __ SmiUntag(edi);
  __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // edi is a smi, i.e. already doubled.
  __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Argument 2: Previous index.
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Subject string.
  __ mov(Operand(esp, 0 * kPointerSize), eax);

  // Locate the code entry and call it.
  __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ CallCFunction(edx, kRegExpExecuteArguments);
Leon Clarkee46be812010-01-19 14:06:41 +000012182
12183 // Check the result.
12184 Label success;
12185 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
12186 __ j(equal, &success, taken);
12187 Label failure;
12188 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
12189 __ j(equal, &failure, taken);
12190 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
12191 // If not exception it can only be retry. Handle that in the runtime system.
12192 __ j(not_equal, &runtime);
12193 // Result must now be exception. If there is no pending exception already a
12194 // stack overflow (on the backtrack stack) was detected in RegExp code but
12195 // haven't created the exception yet. Handle that in the runtime system.
Steve Block6ded16b2010-05-10 14:33:55 +010012196 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Leon Clarkee46be812010-01-19 14:06:41 +000012197 ExternalReference pending_exception(Top::k_pending_exception_address);
12198 __ mov(eax,
12199 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
12200 __ cmp(eax, Operand::StaticVariable(pending_exception));
12201 __ j(equal, &runtime);
12202 __ bind(&failure);
12203 // For failure and exception return null.
12204 __ mov(Operand(eax), Factory::null_value());
12205 __ ret(4 * kPointerSize);
12206
12207 // Load RegExp data.
12208 __ bind(&success);
Leon Clarked91b9f72010-01-27 17:25:45 +000012209 __ mov(eax, Operand(esp, kJSRegExpOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000012210 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
12211 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
12212 // Calculate number of capture registers (number_of_captures + 1) * 2.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012213 STATIC_ASSERT(kSmiTag == 0);
12214 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Leon Clarkee46be812010-01-19 14:06:41 +000012215 __ add(Operand(edx), Immediate(2)); // edx was a smi.
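  // Worked out: the smi tag is one bit, so a capture count of n arrives in
  // edx as n * 2; adding 2 yields (n + 1) * 2, the number of capture
  // registers, already in untagged form.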

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ mov(ecx, ebx);
  __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ mov(ecx, ebx);
  __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector();
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(Operand(edx), Immediate(1));
  __ j(negative, &done);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
}


void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
  __ mov(number_string_cache,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  __ shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
  __ sub(Operand(mask), Immediate(1));  // Make mask.
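  // Example: a cache FixedArray of length 256 (128 entries) stores its
  // length as the smi 512; shifting right by kSmiTagSize + 1 == 2 gives 128,
  // and subtracting 1 produces the power-of-two mask 0x7f.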

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
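  // Each cache entry spans two consecutive elements, the key number followed
  // by its cached string, which is why the accesses below scale the index by
  // times_twice_pointer_size and the result load adds kPointerSize.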
  Label smi_hash_calculated;
  Label load_result_from_cache;
  if (object_is_smi) {
    __ mov(scratch, object);
    __ SmiUntag(scratch);
  } else {
    Label not_smi, hash_calculated;
    STATIC_ASSERT(kSmiTag == 0);
    __ test(object, Immediate(kSmiTagMask));
    __ j(not_zero, &not_smi);
    __ mov(scratch, object);
    __ SmiUntag(scratch);
    __ jmp(&smi_hash_calculated);
    __ bind(&not_smi);
    __ cmp(FieldOperand(object, HeapObject::kMapOffset),
           Factory::heap_number_map());
    __ j(not_equal, not_found);
    STATIC_ASSERT(8 == kDoubleSize);
    __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
    __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
    // Object is heap number and hash is now in scratch. Calculate cache index.
    __ and_(scratch, Operand(mask));
    Register index = scratch;
    Register probe = mask;
    __ mov(probe,
           FieldOperand(number_string_cache,
                        index,
                        times_twice_pointer_size,
                        FixedArray::kHeaderSize));
    __ test(probe, Immediate(kSmiTagMask));
    __ j(zero, not_found);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
      __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
      __ ucomisd(xmm0, xmm1);
    } else {
      __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
      __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
      __ FCmp();
    }
    __ j(parity_even, not_found);  // Bail out if NaN is involved.
    __ j(not_equal, not_found);  // The cache did not contain this value.
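    // Both ucomisd and FCmp set the parity flag for unordered operands,
    // i.e. whenever at least one of them is NaN, so parity_even serves as
    // the NaN test here.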
    __ jmp(&load_result_from_cache);
  }

  __ bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  __ and_(scratch, Operand(mask));
  Register index = scratch;
  // Check if the entry is the smi we are looking for.
  __ cmp(object,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize));
  __ j(not_equal, not_found);

  // Get the result from the cache.
  __ bind(&load_result_from_cache);
  __ mov(result,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize));
  __ IncrementCounter(&Counters::number_to_string_native, 1);
}


void NumberToStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  __ mov(ebx, Operand(esp, kPointerSize));

  // Generate code to lookup number in the number string cache.
  GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
  __ ret(1 * kPointerSize);

  __ bind(&runtime);
  // Handle number to string in the runtime system if not found in the cache.
  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
}

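// Returns the result value that makes the comparison false: for cc == less
// or less_equal the answer is GREATER, for greater or greater_equal it is
// LESS. Used below when a NaN or undefined operand forces a negative result.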
static int NegativeComparisonResult(Condition cc) {
  ASSERT(cc != equal);
  ASSERT((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


void CompareStub::Generate(MacroAssembler* masm) {
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));

  Label check_unequal_objects, done;

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  {
    Label not_identical;
    __ cmp(eax, Operand(edx));
    __ j(not_equal, &not_identical);

    if (cc_ != equal) {
      // Check for undefined. undefined OP undefined is false even though
      // undefined == undefined.
      Label check_for_nan;
      __ cmp(edx, Factory::undefined_value());
      __ j(not_equal, &check_for_nan);
      __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second best thing - test it ourselves.
    // Note: if cc_ != equal, never_nan_nan_ is not used.
    if (never_nan_nan_ && (cc_ == equal)) {
      __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
      __ ret(0);
    } else {
      Label heap_number;
      __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
             Immediate(Factory::heap_number_map()));
      __ j(equal, &heap_number);
      if (cc_ != equal) {
        // Call runtime on identical JSObjects. Otherwise return equal.
        __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
        __ j(above_equal, &not_identical);
      }
      __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
      __ ret(0);

      __ bind(&heap_number);
      // It is a heap number, so return non-equal if it's NaN and equal if
      // it's not NaN.
      // The representation of NaN values has all exponent bits (52..62) set,
      // and not all mantissa bits (0..51) clear.
      // We only accept QNaNs, which have bit 51 set.
      // Read top bits of double representation (second word of value).

      // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
      // all bits in the mask are set. We only need to check the word
      // that contains the exponent and high bit of the mantissa.
      STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
      __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
      __ xor_(eax, Operand(eax));
      // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
      // bits.
      __ add(edx, Operand(edx));
      __ cmp(edx, kQuietNaNHighBitsMask << 1);
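      // The add shifts the sign bit out of edx, so the unsigned comparison
      // against kQuietNaNHighBitsMask << 1 is above_equal exactly when all
      // exponent bits and the top mantissa bit are set, i.e. for a QNaN.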
      if (cc_ == equal) {
        STATIC_ASSERT(EQUAL != 1);
        __ setcc(above_equal, eax);
        __ ret(0);
      } else {
        Label nan;
        __ j(above_equal, &nan);
        __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
        __ ret(0);
        __ bind(&nan);
        __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
        __ ret(0);
      }
    }

    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc_ == equal && strict_) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    ASSERT_EQ(0, Smi::FromInt(0));
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, Operand(eax));
    __ test(ecx, Operand(edx));
    __ j(not_zero, &not_smis);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTagMask, which is either zero or one.
    __ sub(Operand(ecx), Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, Operand(eax));
    __ and_(ebx, Operand(ecx));  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, Operand(eax));
    // If eax was a smi, ebx is now edx; otherwise it is eax.
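    // This is a branchless select: after the sub, ecx is -1 when eax is a
    // smi and 0 otherwise, so ebx = ((edx ^ eax) & ecx) ^ eax evaluates to
    // edx in the smi case and back to eax in the non-smi case.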

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(Factory::heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow);
    // Return non-equal (ebx is not zero).
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different.
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
    __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
    __ j(below, &first_non_object);

    // Return non-zero (eax is not zero).
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  if (include_number_compare_) {
    Label non_number_comparison;
    Label unordered;
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope use_sse2(SSE2);
      CpuFeatures::Scope use_cmov(CMOV);

      FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
      __ ucomisd(xmm0, xmm1);

      // Don't base result on EFLAGS when a NaN is involved.
      __ j(parity_even, &unordered, not_taken);
      // Return a result of -1, 0, or 1, based on EFLAGS.
      __ mov(eax, 0);  // equal
      __ mov(ecx, Immediate(Smi::FromInt(1)));
      __ cmov(above, eax, Operand(ecx));
      __ mov(ecx, Immediate(Smi::FromInt(-1)));
      __ cmov(below, eax, Operand(ecx));
      __ ret(0);
    } else {
      FloatingPointHelper::CheckFloatOperands(
          masm, &non_number_comparison, ebx);
      FloatingPointHelper::LoadFloatOperand(masm, eax);
      FloatingPointHelper::LoadFloatOperand(masm, edx);
      __ FCmp();

      // Don't base result on EFLAGS when a NaN is involved.
      __ j(parity_even, &unordered, not_taken);

      Label below_label, above_label;
      // Return a result of -1, 0, or 1, based on EFLAGS.
      __ j(below, &below_label, not_taken);
      __ j(above, &above_label, not_taken);

      __ xor_(eax, Operand(eax));
      __ ret(0);

      __ bind(&below_label);
      __ mov(eax, Immediate(Smi::FromInt(-1)));
      __ ret(0);

      __ bind(&above_label);
      __ mov(eax, Immediate(Smi::FromInt(1)));
      __ ret(0);
    }

    // If one of the numbers was NaN, then the result is always false.
    // The cc is never not-equal.
    __ bind(&unordered);
    ASSERT(cc_ != not_equal);
    if (cc_ == less || cc_ == less_equal) {
      __ mov(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ mov(eax, Immediate(Smi::FromInt(-1)));
    }
    __ ret(0);

    // The number comparison code did not provide a valid result.
    __ bind(&non_number_comparison);
  }

  // Fast negative check for symbol-to-symbol equality.
  Label check_for_strings;
  if (cc_ == equal) {
    BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
    BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are symbols they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
                                         &check_unequal_objects);

  // Inline comparison of ascii strings.
  StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
                                                     edx,
                                                     eax,
                                                     ecx,
                                                     ebx,
                                                     edi);
#ifdef DEBUG
  __ Abort("Unexpected fall-through from string comparison");
#endif

  __ bind(&check_unequal_objects);
  if (cc_ == equal && !strict_) {
    // Non-strict equality. Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label not_both_objects;
    Label return_unequal;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &not_both_objects);
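    // Heap object pointers have the low tag bit 1 and smis have 0, so the
    // low bit of eax + edx is clear only when both low bits are set (their
    // sum carries), i.e. only when both operands are heap objects.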
    __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
    __ j(below, &not_both_objects);
    __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx);
    __ j(below, &not_both_objects);
    // We do not bail out after this point. Both are JSObjects, and
    // they are equal if and only if both are undetectable.
    // The and of the undetectable flags is 1 if and only if they are equal.
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(zero, &return_unequal);
    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    __ j(zero, &return_unequal);
    // The objects are both undetectable, so they both compare as the value
    // undefined, and are equal.
    __ Set(eax, Immediate(EQUAL));
    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax,
    // or return equal if we fell through to here.
    __ ret(0);
    __ bind(&not_both_objects);
  }

  // Push arguments below the return address.
  __ pop(ecx);
  __ push(edx);
  __ push(eax);

  // Figure out which native to call and set up the arguments.
  Builtins::JavaScript builtin;
  if (cc_ == equal) {
    builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    builtin = Builtins::COMPARE;
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
  }

  // Restore return address on the stack.
  __ push(ecx);

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);
}


void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
                                    Label* label,
                                    Register object,
                                    Register scratch) {
  __ test(object, Immediate(kSmiTagMask));
  __ j(zero, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
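  // The combined mask-and-compare below checks two properties at once: the
  // instance type must be a string (the kIsNotStringMask bits match
  // kStringTag) and it must be a symbol (the kIsSymbolMask bit matches
  // kSymbolTag).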
  __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
  __ cmp(scratch, kSymbolTag | kStringTag);
  __ j(not_equal, label);
}


void StackCheckStub::Generate(MacroAssembler* masm) {
  // Because builtins always remove the receiver from the stack, we
  // have to fake one to avoid underflowing the stack. The receiver
  // must be inserted below the return address on the stack so we
  // temporarily store that in a register.
  __ pop(eax);
  __ push(Immediate(Smi::FromInt(0)));
  __ push(eax);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;

  // If the receiver might be a value (string, number or boolean), check for
  // this and box it if it is.
  if (ReceiverMightBeValue()) {
    // Get the receiver from the stack.
    // +1 ~ return address
    Label receiver_is_value, receiver_is_js_object;
    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));

    // Check if receiver is a smi (which is a number value).
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &receiver_is_value, not_taken);

    // Check if the receiver is a valid JS object.
    __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
    __ j(above_equal, &receiver_is_js_object);

    // Call the runtime to box the value.
    __ bind(&receiver_is_value);
    __ EnterInternalFrame();
    __ push(eax);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ LeaveInternalFrame();
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);

    __ bind(&receiver_is_js_object);
  }

  // Get the function to call from the stack.
  // +2 ~ receiver, return address
  __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));

  // Check that the function really is a JavaScript function.
  __ test(edi, Immediate(kSmiTagMask));
  __ j(zero, &slow, not_taken);
  // Go to the slow case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow, not_taken);

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc_);
  __ InvokeFunction(edi, actual, JUMP_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
  __ Set(eax, Immediate(argc_));
  __ Set(ebx, Immediate(0));
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
  Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
}


void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  // eax holds the exception.

  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // Drop the sp to the top of the handler.
  ExternalReference handler_address(Top::k_handler_address);
  __ mov(esp, Operand::StaticVariable(handler_address));

  // Restore next handler and frame pointer, discard handler state.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  __ pop(Operand::StaticVariable(handler_address));
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  __ pop(ebp);
  __ pop(edx);  // Remove state.

  // Before returning we restore the context from the frame pointer if
  // not NULL. The frame pointer is NULL in the exception handler of
  // a JS entry frame.
  __ xor_(esi, Operand(esi));  // Tentatively set context pointer to NULL.
  Label skip;
  __ cmp(ebp, 0);
  __ j(equal, &skip, not_taken);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ bind(&skip);

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  __ ret(0);
}


// If true, a Handle<T> passed by value is passed and returned by
// using the location_ field directly. If false, it is passed and
// returned as a pointer to a handle.
#ifdef USING_BSD_ABI
static const bool kPassHandlesDirectly = true;
#else
static const bool kPassHandlesDirectly = false;
#endif


void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
  Label empty_handle;
  Label prologue;
  Label promote_scheduled_exception;
  __ EnterApiExitFrame(ExitFrame::MODE_NORMAL, kStackSpace, kArgc);
  STATIC_ASSERT(kArgc == 4);
  if (kPassHandlesDirectly) {
    // When handles are passed directly we don't have to allocate extra
    // space for and pass an out parameter.
    __ mov(Operand(esp, 0 * kPointerSize), ebx);  // name.
    __ mov(Operand(esp, 1 * kPointerSize), eax);  // arguments pointer.
  } else {
    // The function expects three arguments to be passed but we allocate
    // four to get space for the output cell. The argument slots are filled
    // as follows:
    //
    // 3: output cell
    // 2: arguments pointer
    // 1: name
    // 0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects,
    // so the out cell will have to be popped explicitly after returning
    // from the function.
    __ mov(Operand(esp, 1 * kPointerSize), ebx);  // name.
    __ mov(Operand(esp, 2 * kPointerSize), eax);  // arguments pointer.
    __ mov(ebx, esp);
    __ add(Operand(ebx), Immediate(3 * kPointerSize));
    __ mov(Operand(esp, 0 * kPointerSize), ebx);  // output
    __ mov(Operand(esp, 3 * kPointerSize), Immediate(0));  // out cell.
  }
  // Call the api function!
  __ call(fun()->address(), RelocInfo::RUNTIME_ENTRY);
  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(Factory::the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception, not_taken);
  if (!kPassHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    __ mov(eax, Operand(eax, 0));
  }
  // Check if the result handle holds 0.
  __ test(eax, Operand(eax));
  __ j(zero, &empty_handle, not_taken);
  // It was non-zero. Dereference to get the result value.
  __ mov(eax, Operand(eax, 0));
  __ bind(&prologue);
  __ LeaveExitFrame(ExitFrame::MODE_NORMAL);
  __ ret(0);
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
  __ bind(&empty_handle);
  // It was zero; the result is undefined.
  __ mov(eax, Factory::undefined_value());
  __ jmp(&prologue);
}


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate_scope,
                              int /* alignment_skew */) {
  // eax: result parameter for PerformGC, if any
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result_size_ is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  if (do_gc) {
    // Pass failure code returned from last attempt as first argument to
    // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
    // stack alignment is known to be correct. This function takes one argument
    // which is passed on the stack, and we know that the stack has been
    // prepared to pass at least one argument.
    __ mov(Operand(esp, 0 * kPointerSize), eax);  // Result.
    __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth();
  if (always_allocate_scope) {
    __ inc(Operand::StaticVariable(scope_depth));
  }

  // Call C function.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ call(Operand(ebx));
  // Result is in eax or edx:eax - do not destroy these registers!

  if (always_allocate_scope) {
    __ dec(Operand::StaticVariable(scope_depth));
  }

  // Make sure we're not trying to return 'the hole' from the runtime
  // call as this may lead to crashes in the IC code later.
  if (FLAG_debug_code) {
    Label okay;
    __ cmp(eax, Factory::the_hole_value());
    __ j(not_equal, &okay);
    __ int3();
    __ bind(&okay);
  }

  // Check for failure result.
  Label failure_returned;
  STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  __ lea(ecx, Operand(eax, 1));
  // Lower 2 bits of ecx are 0 iff eax has failure tag.
  __ test(ecx, Immediate(kFailureTagMask));
  __ j(zero, &failure_returned, not_taken);
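  // The STATIC_ASSERT above is what makes this work: failure objects carry
  // kFailureTag in their low bits, and adding 1 to such a value clears the
  // low kFailureTagMask bits, while any non-failure tag keeps at least one
  // of them set.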

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(mode_);
  __ ret(0);

  // Handling of failure.
  __ bind(&failure_returned);

  Label retry;
  // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
  STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ j(zero, &retry, taken);

  // Special handling of out of memory exceptions.
  __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
  __ j(equal, throw_out_of_memory_exception);

  // Retrieve the pending exception and clear the variable.
  ExternalReference pending_exception_address(Top::k_pending_exception_address);
  __ mov(eax, Operand::StaticVariable(pending_exception_address));
  __ mov(edx,
         Operand::StaticVariable(ExternalReference::the_hole_value_location()));
  __ mov(Operand::StaticVariable(pending_exception_address), edx);

  // Special handling of termination exceptions, which are uncatchable
  // by JavaScript code.
  __ cmp(eax, Factory::termination_exception());
  __ j(equal, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  // Retry.
  __ bind(&retry);
}


void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                          UncatchableExceptionType type) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // Drop sp to the top stack handler.
  ExternalReference handler_address(Top::k_handler_address);
  __ mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  __ bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
  __ j(equal, &done);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  __ mov(esp, Operand(esp, kNextOffset));
  __ jmp(&loop);
  __ bind(&done);

  // Set the top handler address to next handler past the current ENTRY handler.
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  __ pop(Operand::StaticVariable(handler_address));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(Top::k_external_caught_exception_address);
    __ mov(eax, false);
    __ mov(Operand::StaticVariable(external_caught), eax);

    // Set pending exception and eax to out of memory exception.
    ExternalReference pending_exception(Top::k_pending_exception_address);
    __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
    __ mov(Operand::StaticVariable(pending_exception), eax);
  }

  // Clear the context pointer.
  __ xor_(esi, Operand(esi));

  // Restore fp from handler and discard handler state.
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  __ pop(ebp);
  __ pop(edx);  // State.

  STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  __ ret(0);
}

void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)

  // NOTE: Invocations of builtins may return failure objects instead
  // of a proper result. The builtin entry handles this by performing
  // a garbage collection and retrying the builtin (twice).
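  // The three GenerateCore calls below implement that protocol: the first
  // attempt runs without GC, the second retries after a space-specific GC,
  // and the last retries after a full GC with always-allocate enabled.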

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(mode_);

  // eax: result parameter for PerformGC, if any (setup below)
  // ebx: pointer to builtin function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: argv pointer (C callee-saved)

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Call into the runtime system.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false);

  // Do full GC and retry runtime call one final time.
  Failure* failure = Failure::InternalError();
  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowUncatchable(masm, OUT_OF_MEMORY);

  __ bind(&throw_termination_exception);
  GenerateThrowUncatchable(masm, TERMINATION);

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}


void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, exit;
#ifdef ENABLE_LOGGING_AND_PROFILING
  Label not_outermost_js, not_outermost_js_2;
#endif

  // Setup frame.
  __ push(ebp);
  __ mov(ebp, Operand(esp));

  // Push marker in two places.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
  __ push(Operand::StaticVariable(c_entry_fp));

#ifdef ENABLE_LOGGING_AND_PROFILING
  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ bind(&not_outermost_js);
#endif

  // Call a faked try-block that does the invoke.
  __ call(&invoke);
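  // A sketch of the mechanism: the call pushes the address of the code that
  // follows, so when an exception unwinds to this handler the ret in the
  // throw code resumes at the store to pending_exception below (see
  // PushTryHandler and GenerateThrowTOS).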

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Top::k_pending_exception_address);
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);

  // Clear any pending exceptions.
  __ mov(edx,
         Operand::StaticVariable(ExternalReference::the_hole_value_location()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline
  // builtin and pop the faked function when we return. Notice that we
  // cannot store a reference to the trampoline code directly in this
  // stub, because the builtin stubs may not have been generated yet.
  if (is_construct) {
    ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::JSEntryTrampoline);
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(Operand(edx));

  // Unlink this frame from the handler chain.
  __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Discard the rest of the handler (it is no longer needed).
  __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));

#ifdef ENABLE_LOGGING_AND_PROFILING
  // If current EBP value is the same as js_entry_sp value, it means that
  // the current function is the outermost.
  __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);
#endif

  // Restore the top frame descriptor from the stack.
  __ bind(&exit);
  __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(Operand(esp), Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}


void InstanceofStub::Generate(MacroAssembler* masm) {
  // Get the object - go slow case if it's a smi.
  Label slow;
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // 2 ~ return address, function
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &slow, not_taken);

  // Check that the left hand is a JS object.
  __ IsObjectJSObjectType(eax, eax, edx, &slow);

  // Get the prototype of the function.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // 1 ~ return address
  // edx is function, eax is map.

  // Look up the function and the map in the instanceof cache.
  Label miss;
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
  __ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address));
  __ j(not_equal, &miss);
  __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
  __ cmp(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
  __ j(not_equal, &miss);
  __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
  __ mov(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
  __ ret(2 * kPointerSize);

  __ bind(&miss);
  __ TryGetFunctionPrototype(edx, ebx, ecx, &slow);

  // Check that the function prototype is a JS object.
  __ test(ebx, Immediate(kSmiTagMask));
  __ j(zero, &slow, not_taken);
  __ IsObjectJSObjectType(ebx, ecx, ecx, &slow);

  // Register mapping:
  // eax is object map.
  // edx is function.
  // ebx is function prototype.
  __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
  __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), edx);

  __ mov(ecx, FieldOperand(eax, Map::kPrototypeOffset));

  // Loop through the prototype chain looking for the function prototype.
  Label loop, is_instance, is_not_instance;
  __ bind(&loop);
  __ cmp(ecx, Operand(ebx));
  __ j(equal, &is_instance);
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  __ j(equal, &is_not_instance);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(ecx, FieldOperand(ecx, Map::kPrototypeOffset));
  __ jmp(&loop);

  __ bind(&is_instance);
  __ Set(eax, Immediate(0));
  __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
  __ ret(2 * kPointerSize);

  __ bind(&is_not_instance);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
  __ ret(2 * kPointerSize);
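  // Note the answer encoding: eax == 0 means the object is an instance and
  // eax == Smi::FromInt(1) means it is not, so callers test the result
  // against zero.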

  // Slow-case: Go through the JavaScript implementation.
  __ bind(&slow);
  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
}


int CompareStub::MinorKey() {
  // Encode the parameters in a unique 16 bit value. To avoid duplicate
  // stubs, the never-NaN-NaN condition is only taken into account if the
  // condition is equals.
  ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
  return ConditionField::encode(static_cast<unsigned>(cc_))
         | RegisterField::encode(false)  // lhs_ and rhs_ are not used
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_);
}
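// A sketch of the packing, assuming the BitField layouts declared for
// CompareStub elsewhere in the code base: the condition fills the low bits
// (up to 12 by the ASSERT above) and the register, strict, never-NaN-NaN and
// include-number-compare flags each occupy one bit above it, so every
// distinct configuration yields a distinct minor key.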


// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
  ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));

  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";

  const char* cc_name;
  switch (cc_) {
    case less: cc_name = "LT"; break;
    case greater: cc_name = "GT"; break;
    case less_equal: cc_name = "LE"; break;
    case greater_equal: cc_name = "GE"; break;
    case equal: cc_name = "EQ"; break;
    case not_equal: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  }

  const char* strict_name = "";
  if (strict_ && (cc_ == equal || cc_ == not_equal)) {
    strict_name = "_STRICT";
  }

  const char* never_nan_nan_name = "";
  if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
    never_nan_nan_name = "_NO_NAN";
  }

  const char* include_number_compare_name = "";
  if (!include_number_compare_) {
    include_number_compare_name = "_NO_NUMBER";
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "CompareStub_%s%s%s%s",
               cc_name,
               strict_name,
               never_nan_nan_name,
               include_number_compare_name);
  return name_;
}


// -------------------------------------------------------------------------
// StringCharCodeAtGenerator

void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
  Label flat_string;
  Label ascii_string;
  Label got_char_code;

  // If the receiver is a smi trigger the non-string case.
  STATIC_ASSERT(kSmiTag == 0);
  __ test(object_, Immediate(kSmiTagMask));
  __ j(zero, receiver_not_string_);

  // Fetch the instance type of the receiver into result register.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the receiver is not a string trigger the non-string case.
  __ test(result_, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string_);

  // If the index is non-smi trigger the non-smi case.
  STATIC_ASSERT(kSmiTag == 0);
  __ test(index_, Immediate(kSmiTagMask));
  __ j(not_zero, &index_not_smi_);

  // Put smi-tagged index into scratch register.
  __ mov(scratch_, index_);
  __ bind(&got_smi_index_);

  // Check for index out of range.
  __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
  __ j(above_equal, index_out_of_range_);

  // We need special handling for non-flat strings.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result_, Immediate(kStringRepresentationMask));
  __ j(zero, &flat_string);

  // Handle non-flat strings.
  __ test(result_, Immediate(kIsConsStringMask));
  __ j(zero, &call_runtime_);

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
         Immediate(Factory::empty_string()));
  __ j(not_equal, &call_runtime_);
  // Get the first of the two strings and load its instance type.
  __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  // If the first cons component is also non-flat, then go to runtime.
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result_, Immediate(kStringRepresentationMask));
  __ j(not_zero, &call_runtime_);

  // Check for 1-byte or 2-byte string.
  __ bind(&flat_string);
  STATIC_ASSERT(kAsciiStringTag != 0);
  __ test(result_, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013405 __ movzx_w(result_, FieldOperand(object_,
13406 scratch_, times_1, // Scratch is smi-tagged.
13407 SeqTwoByteString::kHeaderSize));
13408 __ jmp(&got_char_code);
Steve Block6ded16b2010-05-10 14:33:55 +010013409
13410 // ASCII string.
Steve Block6ded16b2010-05-10 14:33:55 +010013411 // Load the byte into the result register.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013412 __ bind(&ascii_string);
13413 __ SmiUntag(scratch_);
13414 __ movzx_b(result_, FieldOperand(object_,
13415 scratch_, times_1,
13416 SeqAsciiString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010013417 __ bind(&got_char_code);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013418 __ SmiTag(result_);
13419 __ bind(&exit_);
Steve Block6ded16b2010-05-10 14:33:55 +010013420}
13421
13422
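// Editor's outline of the fast path above (pseudocode; the slow-path labels
// index_not_smi_, call_runtime_, etc. are bound in GenerateSlow below):
//
//   if (!receiver.IsString() || !index.IsSmi()) bail out via the labels;
//   if (index >= receiver.length) goto index_out_of_range_;
//   if (receiver is a cons string with an empty second half)
//     receiver = receiver.first;        // Really flat, just wrapped.
//   if (receiver is still not a sequential string) goto call_runtime_;
//   result = Smi(ascii ? one_byte[index] : two_byte[index]);

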
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharCodeAt slow case");

  // Index is not a smi.
  __ bind(&index_not_smi_);
  // If index is a heap number, try converting it to an integer.
  __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ push(index_);  // Consumed by runtime conversion function.
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  if (!scratch_.is(eax)) {
    // Save the conversion result before the pop instructions below
    // have a chance to overwrite it.
    __ mov(scratch_, eax);
  }
  __ pop(index_);
  __ pop(object_);
  // Reload the instance type.
  __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
  __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
  call_helper.AfterCall(masm);
  // If index is still not a smi, it must be out of range.
  STATIC_ASSERT(kSmiTag == 0);
  __ test(scratch_, Immediate(kSmiTagMask));
  __ j(not_zero, index_out_of_range_);
  // Otherwise, return to the fast path.
  __ jmp(&got_smi_index_);

  // Call the runtime. We get here when the receiver is a string and the
  // index is a number, but getting the actual character is too complex
  // (e.g., when the string needs to be flattened).
  __ bind(&call_runtime_);
  call_helper.BeforeCall(masm);
  __ push(object_);
  __ push(index_);
  __ CallRuntime(Runtime::kStringCharCodeAt, 2);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
  __ test(code_,
          Immediate(kSmiTagMask |
                    ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
  __ j(not_zero, &slow_case_, not_taken);

  __ Set(result_, Immediate(Factory::single_character_string_cache()));
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiShiftSize == 0);
  // At this point the code register contains a smi-tagged ascii char code.
  __ mov(result_, FieldOperand(result_,
                               code_, times_half_pointer_size,
                               FixedArray::kHeaderSize));
  __ cmp(result_, Factory::undefined_value());
  __ j(equal, &slow_case_, not_taken);
  __ bind(&exit_);
}


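// Editor's note: the single test above folds two checks into one. A value
// passes only if it is a smi (low tag bit clear) and its untagged payload
// fits in the ascii range. A minimal sketch, assuming kSmiTagSize == 1 and
// String::kMaxAsciiCharCode == 127:
static inline bool FitsSingleCharacterCacheSketch(int tagged_code) {
  const int kMaxAsciiCharCodeSketch = 127;
  const int kMask = 1 /* smi tag bit */ | (~kMaxAsciiCharCodeSketch << 1);
  // Zero iff the smi tag bit is clear and the untagged value is at most 127.
  return (tagged_code & kMask) == 0;
}

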
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  __ Abort("Unexpected fallthrough to CharFromCode slow case");

  __ bind(&slow_case_);
  call_helper.BeforeCall(masm);
  __ push(code_);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  if (!result_.is(eax)) {
    __ mov(result_, eax);
  }
  call_helper.AfterCall(masm);
  __ jmp(&exit_);

  __ Abort("Unexpected fallthrough from CharFromCode slow case");
}


// -------------------------------------------------------------------------
// StringCharAtGenerator

void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
  char_code_at_generator_.GenerateFast(masm);
  char_from_code_generator_.GenerateFast(masm);
}


void StringCharAtGenerator::GenerateSlow(
    MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
  char_code_at_generator_.GenerateSlow(masm, call_helper);
  char_from_code_generator_.GenerateSlow(masm, call_helper);
}


void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime;

  // Load the two arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (string_check_) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);

    // First argument is a string, test the second.
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);
  }

  // Both arguments are strings.
  // eax: first string
  // edx: second string
  // Check if either of the strings is empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(ecx, Operand(ecx));
  __ j(not_zero, &second_not_zero_length);
  // Second string is empty, result is first string which is already in eax.
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(ebx, Operand(ebx));
  __ j(not_zero, &both_not_zero_length);
  // First string is empty, result is second string which is in edx.
  __ mov(eax, edx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // eax: first string
  // ebx: length of first string as a smi
  // ecx: length of second string as a smi
  // edx: second string
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);
  __ add(ebx, Operand(ecx));
  STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
  // Handle exceptionally long strings in the runtime system.
  __ j(overflow, &string_add_runtime);
  // Use the runtime system when adding two one character strings, as it
  // contains optimizations for this specific case using the symbol table.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
                                         &string_add_runtime);

  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));

  // Try to look up the two character string in the symbol table. If it is
  // not found, just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ Set(ebx, Immediate(Smi::FromInt(2)));
  __ jmp(&make_flat_ascii_string);

  __ bind(&longer_than_two);
  // Check if the resulting string will be flat.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
  __ j(below, &string_add_flat_result);

  // If the result is not supposed to be flat, allocate a cons string object.
  // If both strings are ascii the result is an ascii cons string.
  Label non_ascii, allocated, ascii_data;
  __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ and_(ecx, Operand(edi));
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  __ bind(&ascii_data);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
  __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
  __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
         Immediate(String::kEmptyHashField));
  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
  __ mov(eax, ecx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // At least one of the strings is two-byte. Check whether it happens
  // to contain only ascii characters.
  // ecx: first instance type AND second instance type.
  // edi: second instance type.
  __ test(ecx, Immediate(kAsciiDataHintMask));
  __ j(not_zero, &ascii_data);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ xor_(edi, Operand(ecx));
  STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
  __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
  __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
  __ j(equal, &ascii_data);
  // Allocate a two byte cons string.
  __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&string_add_flat_result);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  // Now check if both strings are ascii strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  Label non_ascii_string_add_flat_result;
  STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(zero, &non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(zero, &string_add_runtime);

  __ bind(&make_flat_ascii_string);
  // Both strings are ascii strings. As they are short they are both flat.
  // ebx: length of resulting flat string as a smi
  __ SmiUntag(ebx);
  __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // eax: first string - known to be two byte
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ SmiUntag(ebx);
  __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
}


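// Editor's outline of the strategy above (pseudocode; the thresholds are
// the constants referenced in the code):
//
//   if (first.length == 0) return second;   // And symmetrically for second.
//   if (total_length overflows a smi)       use the runtime;
//   if (total_length == 2)                  probe the two-character symbol
//                                           table before allocating;
//   if (total_length < String::kMinNonFlatLength)
//     allocate a flat sequential string and copy both halves;
//   else
//     allocate a ConsString pointing at the two halves (no copying);
//   external inputs fall back to the runtime in the flat case.

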
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for
  // very short strings.
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(1));
    __ add(Operand(dest), Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(2));
    __ add(Operand(dest), Immediate(2));
  }
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);
}


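// Editor's sketch (illustrative, not used by the VM): a C++ equivalent of
// the loop generated above. Note the do/while shape: the generated code
// tests count only after the first character, so callers must pass
// count > 0.
static inline void CopyCharsSketch(void* dest, const void* src,
                                   int count, bool ascii) {
  if (ascii) {
    unsigned char* d = static_cast<unsigned char*>(dest);
    const unsigned char* s = static_cast<const unsigned char*>(src);
    do { *d++ = *s++; } while (--count != 0);
  } else {
    unsigned short* d = static_cast<unsigned short*>(dest);
    const unsigned short* s = static_cast<const unsigned short*>(src);
    do { *d++ = *s++; } while (--count != 0);
  }
}

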
void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords.
  // The destination is aligned on a 4 byte boundary because we are
  // copying to the beginning of a newly allocated string.
  ASSERT(dest.is(edi));  // rep movs destination
  ASSERT(src.is(esi));  // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are fewer than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes);

  // Copy from esi to edi using the rep movs instruction.
  __ mov(scratch, count);
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(Operand(src), Immediate(1));
  __ add(Operand(dest), Immediate(1));
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}


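// Editor's sketch (illustrative): the structure of the copy generated above
// in plain C++ -- whole doublewords via rep movs, then a byte loop for the
// 0-3 byte tail. Source and destination must not overlap.
static inline void CopyBytesSketch(unsigned char* dst,
                                   const unsigned char* src,
                                   int count) {
  for (int i = 0; i < (count >> 2); i++) {  // What rep_movs does: four
    dst[0] = src[0]; dst[1] = src[1];       // bytes per iteration.
    dst[2] = src[2]; dst[3] = src[3];
    dst += 4; src += 4;
  }
  for (int i = 0; i < (count & 3); i++) {   // Remaining tail bytes.
    dst[i] = src[i];
  }
}

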
void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol
  // table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index);
  __ mov(scratch, c2);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, kBitsPerByte);
  __ or_(chars, Operand(c2));

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
  __ mov(symbol_table,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(Operand(mask), Immediate(1));

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string
  // symbol_table: symbol table
  // mask: capacity mask
  // scratch: -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ mov(scratch, hash);
    if (i > 0) {
      __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(mask));

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    STATIC_ASSERT(SymbolTable::kEntrySize == 1);
    __ mov(candidate,
           FieldOperand(symbol_table,
                        scratch,
                        times_pointer_size,
                        SymbolTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    __ cmp(candidate, Factory::undefined_value());
    __ j(equal, not_found);

    // If length is not 2 the string is not a candidate.
    __ cmp(FieldOperand(candidate, String::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &next_probe[i]);

    // As we are out of registers save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ascii string.
    __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ and_(temp, 0x0000ffff);
    __ cmp(chars, Operand(temp));
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  __ pop(mask);  // Pop saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}


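// Editor's outline of the probe loop above (pseudocode; GetProbeOffset is
// the probe-sequence step used by V8's hash tables):
//
//   for (int i = 0; i < 4; i++) {
//     entry = (hash + GetProbeOffset(i)) & capacity_mask;
//     candidate = symbol_table[entry];
//     if (candidate == undefined) return not_found;  // End of hash chain.
//     if (candidate.length == 2 && candidate is sequential ascii &&
//         candidate's two bytes == chars) return candidate;
//   }
//   return not_found;  // Give up after four probes; the caller allocates.

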
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ mov(hash, character);
  __ shl(hash, 10);
  __ add(hash, Operand(character));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, Operand(character));
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ sar(scratch, 11);
  __ xor_(hash, Operand(scratch));
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, Operand(scratch));

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ test(hash, Operand(hash));
  __ j(not_zero, &hash_not_zero);
  __ mov(hash, Immediate(27));
  __ bind(&hash_not_zero);
}


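// Editor's sketch (illustrative, not used by the VM): the three helpers
// above together compute a Jenkins-style one-at-a-time hash. In plain C++,
// with 32-bit unsigned arithmetic (the generated code shifts with sar, so
// this agrees as long as the intermediate hash stays below 2^31):
static inline unsigned StringHashSketch(const unsigned char* chars,
                                        int length) {
  unsigned hash = 0;
  for (int i = 0; i < length; i++) {  // GenerateHashInit/AddCharacter.
    hash += chars[i];
    hash += hash << 10;
    hash ^= hash >> 6;
  }
  hash += hash << 3;                  // GenerateHashGetHash.
  hash ^= hash >> 11;
  hash += hash << 15;
  return hash != 0 ? hash : 27;       // Zero is reserved.
}

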
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: to
  // esp[8]: from
  // esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type

  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ sub(ecx, Operand(edx));
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
  Label return_eax;
  __ j(equal, &return_eax);
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked for in the symbol cache.
  __ SmiUntag(ecx);  // Result length is no longer smi.
  __ cmp(ecx, 2);
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // eax: string
  // ebx: instance type
  // ecx: sub string length (value is 2)
  // edx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiUntag(edx);  // From index is no longer smi.
  __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx,
             FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ Set(ecx, Immediate(2));

  __ bind(&result_longer_than_two);
  // eax: string
  // ebx: instance type
  // ecx: result string length
  // Check for a flat ascii string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  __ SmiUntag(ebx);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  __ bind(&non_ascii_flat);
  // eax: string
  // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
  // ecx: result string length
  // Check for a flat two byte string.
  __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  // As from is a smi it is 2 times the value, which matches the size of a
  // two byte character.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
  __ mov(esi, edx);  // Restore esi.

  __ bind(&return_eax);
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}


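// Editor's outline of the dispatch above (pseudocode):
//
//   length = to - from;                          // Both indices are smis.
//   if (length == string.length) return string;  // Whole string requested.
//   if (length < 2) use the runtime (the single-character cache lives
//                   there);
//   if (length == 2) probe the two-character symbol table first;
//   otherwise allocate a sequential string of the same width and copy the
//   range with GenerateCopyCharactersREP.

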
void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Label result_not_equal;
  Label result_greater;
  Label compare_lengths;

  __ IncrementCounter(&Counters::string_compare_native, 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, Operand(length_delta));
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  __ test(min_length, Operand(min_length));
  __ j(zero, &compare_lengths);

  // Change index to run from -min_length to -1 by adding min_length
  // to string start. This means that loop ends when index reaches zero,
  // which doesn't need an additional compare.
  __ SmiUntag(min_length);
  __ lea(left,
         FieldOperand(left,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ neg(min_length);

  Register index = min_length;  // index = -min_length;

  {
    // Compare loop.
    Label loop;
    __ bind(&loop);
    // Compare characters.
    __ mov_b(scratch2, Operand(left, index, times_1, 0));
    __ cmpb(scratch2, Operand(right, index, times_1, 0));
    __ j(not_equal, &result_not_equal);
    __ add(Operand(index), Immediate(1));
    __ j(not_zero, &loop);
  }

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, Operand(length_delta));
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  __ bind(&result_not_equal);
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}


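// Editor's sketch (illustrative): the comparison implemented above, in
// plain C++ with the lengths already untagged:
static inline int CompareFlatAsciiSketch(const unsigned char* left,
                                         int left_length,
                                         const unsigned char* right,
                                         int right_length) {
  int min_length =
      left_length < right_length ? left_length : right_length;
  for (int i = 0; i < min_length; i++) {
    if (left[i] != right[i]) {
      return left[i] < right[i] ? -1 : 1;       // LESS / GREATER.
    }
  }
  if (left_length == right_length) return 0;    // EQUAL.
  return left_length < right_length ? -1 : 1;   // Shorter string is less.
}

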
void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: right string
  // esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  Label not_same;
  __ cmp(edx, Operand(eax));
  __ j(not_equal, &not_same);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ascii strings.
  // Drop the arguments from the stack but keep the return address.
  __ pop(ecx);
  __ add(Operand(esp), Immediate(2 * kPointerSize));
  __ push(ecx);
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}

#undef __

#define __ masm.

MemCopyFunction CreateMemCopyFunction() {
  size_t actual_size;
  byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
                                                 &actual_size,
                                                 true));
  CHECK(buffer);
  HandleScope handles;
  MacroAssembler masm(buffer, static_cast<int>(actual_size));

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  int stack_offset = 0;  // Update if we change the stack height.

  if (FLAG_debug_code) {
    __ cmp(Operand(esp, kSizeOffset + stack_offset),
           Immediate(kMinComplexMemCopy));
    Label ok;
    __ j(greater_equal, &ok);
    __ int3();
    __ bind(&ok);
  }
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope enable(SSE2);
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    // Copy the first 16 bytes unaligned, then advance so that dst is
    // aligned on a 16 byte boundary. The overlap with the aligned copy
    // below is harmless because source and destination do not overlap.
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(Operand(edx), Immediate(16));  // edx = 16 - (dst & 15)
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);

    // edi is now aligned. Check if esi is also aligned.
    Label unaligned_source;
    __ test(Operand(src), Immediate(0x0F));
    __ j(not_zero, &unaligned_source);
    {
      __ IncrementCounter(&Counters::memcopy_aligned, 1);
      // Copy loop for aligned source and destination.
      __ mov(edx, count);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqa(xmm0, Operand(src, 0x00));
        __ movdqa(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqa(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0xF);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);

      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }
    __ Align(16);
    {
      // Copy loop for unaligned source and aligned destination.
      // If source is not aligned, we can't read it as efficiently.
      __ bind(&unaligned_source);
      __ IncrementCounter(&Counters::memcopy_unaligned, 1);
      __ mov(edx, ecx);
      Register loop_count = ecx;
      Register count = edx;
      __ shr(loop_count, 5);
      {
        // Main copy loop.
        Label loop;
        __ bind(&loop);
        __ prefetch(Operand(src, 0x20), 1);
        __ movdqu(xmm0, Operand(src, 0x00));
        __ movdqu(xmm1, Operand(src, 0x10));
        __ add(Operand(src), Immediate(0x20));

        __ movdqa(Operand(dst, 0x00), xmm0);
        __ movdqa(Operand(dst, 0x10), xmm1);
        __ add(Operand(dst), Immediate(0x20));

        __ dec(loop_count);
        __ j(not_zero, &loop);
      }

      // At most 31 bytes to copy.
      Label move_less_16;
      __ test(Operand(count), Immediate(0x10));
      __ j(zero, &move_less_16);
      __ movdqu(xmm0, Operand(src, 0));
      __ add(Operand(src), Immediate(0x10));
      __ movdqa(Operand(dst, 0), xmm0);
      __ add(Operand(dst), Immediate(0x10));
      __ bind(&move_less_16);

      // At most 15 bytes to copy. Copy 16 bytes at end of string.
      __ and_(count, 0x0F);
      __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
      __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);

      __ pop(esi);
      __ pop(edi);
      __ ret(0);
    }

  } else {
    __ IncrementCounter(&Counters::memcopy_noxmm, 1);
    // SSE2 not supported. Unlikely to happen in practice.
    __ push(edi);
    __ push(esi);
    stack_offset += 2 * kPointerSize;
    __ cld();
    Register dst = edi;
    Register src = esi;
    Register count = ecx;
    __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
    __ mov(src, Operand(esp, stack_offset + kSourceOffset));
    __ mov(count, Operand(esp, stack_offset + kSizeOffset));

    // Copy the first word.
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);

    // Increment src and dst so that dst is aligned.
    __ mov(edx, dst);
    __ and_(edx, 0x03);
    __ neg(edx);
    __ add(Operand(edx), Immediate(4));  // edx = 4 - (dst & 3)
    __ add(dst, Operand(edx));
    __ add(src, Operand(edx));
    __ sub(Operand(count), edx);
    // edi is now aligned, ecx holds number of remaining bytes to copy.

    __ mov(edx, count);
    count = edx;
    __ shr(ecx, 2);  // Make word count instead of byte count.
    __ rep_movs();

    // At most 3 bytes left to copy. Copy 4 bytes at end of string.
    __ and_(count, 3);
    __ mov(eax, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, count, times_1, -4), eax);

    __ pop(esi);
    __ pop(edi);
    __ ret(0);
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  // Call the function from C++.
  return FUNCTION_CAST<MemCopyFunction>(buffer);
}

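
// Editor's note on usage (the names in this sketch are illustrative, not
// V8's actual plumbing): the returned pointer is typically cached once and
// dispatched to for large copies; the debug check above enforces the size
// threshold.
//
//   static MemCopyFunction memcopy = CreateMemCopyFunction();
//   void MemCopy(void* dest, const void* src, size_t size) {
//     if (size >= kMinComplexMemCopy) {
//       (*memcopy)(dest, src, size);
//     } else {
//       // Small copies are cheaper without the call; copy byte by byte.
//     }
//   }
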

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32