// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "register-allocator-inl.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

// -------------------------------------------------------------------------
// Platform-specific FrameRegisterState functions.

void FrameRegisterState::Save(MacroAssembler* masm) const {
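  // Each entry of registers_ is kPush, kIgnore, or an ebp-relative frame
  // offset, the latter possibly tagged with kSyncedFlag when the frame
  // slot already holds the register's value and no store is needed.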
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void FrameRegisterState::Restore(MacroAssembler* masm) const {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


#undef __
#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  frame_state_.Save(masm_);
}


void DeferredCode::RestoreRegisters() {
  frame_state_.Restore(masm_);
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->Save(masm);
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  frame_state_->Restore(masm);
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->loop_nesting();

  JumpTarget::set_compiling_deferred_code(false);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    frame_->SpillAll();
    __ int3();
  }
#endif

  // New scope to get automatic timing calculation.
  { HistogramTimerScope codegen_timer(&Counters::code_generation);
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

    frame_->Enter();

    // Allocate space for locals and initialize them.
    frame_->AllocateStackSlots();

    // Allocate the local context if needed.
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      Comment cmnt(masm_, "[ allocate local context");
      // Allocate local context.
      // Get outer context and create a new context based on it.
      frame_->PushFunction();
      Result context;
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        context = frame_->CallStub(&stub, 1);
      } else {
        context = frame_->CallRuntime(Runtime::kNewContext, 1);
      }

      // Update context local.
      frame_->SaveContextRegister();

      // Verify that the runtime call result and esi agree.
      if (FLAG_debug_code) {
        __ cmp(context.reg(), Operand(esi));
        __ Assert(equal, "Runtime::NewContext should end up in esi");
      }
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->slot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          // The use of SlotOperand below is safe in unspilled code
          // because the slot is guaranteed to be a context slot.
          //
          // There are no parameters in the global scope.
          ASSERT(!scope()->is_global_scope());
          frame_->PushParameterAt(i);
          Result value = frame_->Pop();
          value.ToRegister();

          // SlotOperand loads context.reg() with the context object
          // stored to, used below in RecordWrite.
          Result context = allocator_->Allocate();
          ASSERT(context.is_valid());
          __ mov(SlotOperand(slot, context.reg()), value.reg());
          int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
          Result scratch = allocator_->Allocate();
          ASSERT(scratch.is_valid());
          frame_->Spill(context.reg());
          frame_->Spill(value.reg());
          __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
        }
      }
    }

    // Store the arguments object. This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->Push(Factory::the_hole_value());
      StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(loop_nesting_, info->loop_nesting());
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // do not overwrite context register
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we
      // are always at a function context. However it is safe to
      // dereference because the function context of a function context
      // is itself. Before deleting this mov we should try to create a
      // counter-example first, though...)
423 __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
424 return ContextOperand(tmp, index);
425 }
426
427 default:
428 UNREACHABLE();
429 return Operand(eax);
430 }
431}
432
433
434Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
435 Result tmp,
436 JumpTarget* slow) {
437 ASSERT(slot->type() == Slot::CONTEXT);
438 ASSERT(tmp.is_register());
439 Register context = esi;
440
441 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
442 if (s->num_heap_slots() > 0) {
443 if (s->calls_eval()) {
444 // Check that extension is NULL.
445 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
446 Immediate(0));
447 slow->Branch(not_equal, not_taken);
448 }
449 __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
450 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
451 context = tmp.reg();
452 }
453 }
454 // Check that last extension is NULL.
455 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
456 slow->Branch(not_equal, not_taken);
457 __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
458 return ContextOperand(tmp.reg(), slot->index());
459}
460
461
462// Emit code to load the value of an expression to the top of the
463// frame. If the expression is boolean-valued it may be compiled (or
464// partially compiled) into control flow to the control destination.
465// If force_control is true, control flow is forced.
Steve Block6ded16b2010-05-10 14:33:55 +0100466void CodeGenerator::LoadCondition(Expression* expr,
Steve Blocka7e24c12009-10-30 11:49:00 +0000467 ControlDestination* dest,
468 bool force_control) {
469 ASSERT(!in_spilled_code());
470 int original_height = frame_->height();
471
Steve Blockd0582a62009-12-15 09:54:21 +0000472 { CodeGenState new_state(this, dest);
Steve Block6ded16b2010-05-10 14:33:55 +0100473 Visit(expr);
Steve Blocka7e24c12009-10-30 11:49:00 +0000474
475 // If we hit a stack overflow, we may not have actually visited
476 // the expression. In that case, we ensure that we have a
477 // valid-looking frame state because we will continue to generate
478 // code as we unwind the C++ stack.
479 //
480 // It's possible to have both a stack overflow and a valid frame
    // state (e.g., a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}


void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
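    // Adding the register to itself shifts it left by one, which is
    // exactly the Smi tagging operation (kSmiTag == 0, kSmiTagSize == 1).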
    __ add(value->reg(), Operand(value->reg()));
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}


void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number disagree.
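    // After the shift the sign bit is the complement of the original
    // bit 31, so flipping it restores the original value.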
    __ xor_(val, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    if (!CpuFeatures::IsSupported(SSE2)) {
      // Pop the value from the floating point stack.
      __ fstp(0);
    }
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int computation,
  // compile that SafeInt32 path, and a bailout path.
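  // (The duplicated code is only worth generating when the expression
  // contains more than two bit operations and SSE2 is available.)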
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      CpuFeatures::IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;
    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(Factory::false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(Factory::true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(Factory::true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(Factory::false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (e.g., the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(Factory::true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(Factory::false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObject());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObject());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObject());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->slot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
  return frame_->Pop();
}


// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code. Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it. Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it. Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_integer32()) {  // Also takes Smi case.
    Comment cmnt(masm_, "ONLY_INTEGER_32");
    if (FLAG_debug_code) {
      Label ok;
      __ AbortIfNotNumber(value.reg());
      __ test(value.reg(), Immediate(kSmiTagMask));
      __ j(zero, &ok);
      __ fldz();
      __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
      __ FCmp();
      __ j(not_zero, &ok);
      __ Abort("Smi was wrapped in HeapNumber in output from bitop");
      __ bind(&ok);
    }
    // In the integer32 case there are no Smis hidden in heap numbers, so we
    // need only test for Smi zero.
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    value.Unuse();
    dest->Split(not_zero);
  } else if (value.is_number()) {
    Comment cmnt(masm_, "ONLY_NUMBER");
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);
    __ fldz();
    __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
    __ FCmp();
    value.Unuse();
    dest->Split(not_zero);
  } else {
    // Fast case checks.
    // 'false' => false.
    __ cmp(value.reg(), Factory::false_value());
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ cmp(value.reg(), Factory::true_value());
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ cmp(value.reg(), Factory::undefined_value());
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    STATIC_ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ test(temp.reg(), Operand(temp.reg()));
    temp.Unuse();
    dest->Split(not_equal);
  }
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             TypeInfo type_info,
                             bool use_sse3,
                             Label* operand_conversion_failure);
  static void LoadNumbersAsIntegers(MacroAssembler* masm,
                                    TypeInfo type_info,
                                    bool use_sse3,
                                    Label* operand_conversion_failure);
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Test if operands are smis or heap numbers and load them
  // into xmm0 and xmm1 if they are. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);
};


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


// Perform or call the specialized stub for a binary operation. Requires the
// three registers left, right and dst to be distinct and spilled. This
// deferred operation has up to three entry points: The main one calls the
// runtime system. The second is for when the result is a non-Smi. The
// third is for when at least one of the inputs is non-Smi and we have SSE2.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
    ASSERT(!left.is(right));
  }

  virtual void Generate();

  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit().
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToAnswerOutOfRange(Condition cond);
  void JumpToConstantRhs(Condition cond, Smi* smi_value);
  Label* NonSmiInputLabel();

 private:
  void GenerateAnswerOutOfRange();
  void GenerateNonSmiInput();

  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
  Label answer_out_of_range_;
  Label non_smi_input_;
  Label constant_rhs_;
  Smi* smi_value_;
};


Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
  if (Token::IsBitOp(op_) && CpuFeatures::IsSupported(SSE2)) {
    return &non_smi_input_;
  } else {
    return entry_label();
  }
}


void DeferredInlineBinaryOperation::JumpToAnswerOutOfRange(Condition cond) {
  __ j(cond, &answer_out_of_range_);
}


void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond,
                                                      Smi* smi_value) {
  smi_value_ = smi_value;
  __ j(cond, &constant_rhs_);
}


void DeferredInlineBinaryOperation::Generate() {
  // Registers are not saved implicitly for this stub, so we should not
  // tread on the registers that were not passed to us.
  if (CpuFeatures::IsSupported(SSE2) &&
      ((op_ == Token::ADD) ||
       (op_ == Token::SUB) ||
       (op_ == Token::MUL) ||
       (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    Exit();

    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  // Register spilling is not done implicitly for this stub.
  // We can't postpone it any more now though.
  SaveRegisters();

  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  RestoreRegisters();
  Exit();

  if (non_smi_input_.is_linked() || constant_rhs_.is_linked()) {
    GenerateNonSmiInput();
  }
  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


void DeferredInlineBinaryOperation::GenerateNonSmiInput() {
  // We know at least one of the inputs was not a Smi.
  // This is a third entry point into the deferred code.
  // We may not overwrite left_ because we want to be able
  // to call the handling code for a non-smi answer and it
  // might want to overwrite the heap number in left_.
  ASSERT(!right_.is(dst_));
  ASSERT(!left_.is(dst_));
  ASSERT(!left_.is(right_));
  // This entry point is used for bit ops where the right hand side
  // is a constant Smi and the left hand side is a heap object. It
  // is also used for bit ops where both sides are unknown, but where
  // at least one of them is a heap object.
  bool rhs_is_constant = constant_rhs_.is_linked();
  // We can't generate code for both cases.
  ASSERT(!non_smi_input_.is_linked() || !constant_rhs_.is_linked());

  if (FLAG_debug_code) {
    __ int3();  // We don't fall through into this code.
  }

  __ bind(&non_smi_input_);

  if (rhs_is_constant) {
    __ bind(&constant_rhs_);
    // In this case the input is a heap object and it is in the dst_ register.
    // The left_ and right_ registers have not been initialized yet.
    __ mov(right_, Immediate(smi_value_));
    __ mov(left_, Operand(dst_));
    if (!CpuFeatures::IsSupported(SSE2)) {
      __ jmp(entry_label());
      return;
    } else {
      CpuFeatures::Scope use_sse2(SSE2);
      __ JumpIfNotNumber(dst_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    }
  } else {
    // We know we have SSE2 here because otherwise the label is not linked (see
    // NonSmiInputLabel).
    CpuFeatures::Scope use_sse2(SSE2);
    // Handle the non-constant right hand side situation:
    if (left_info_.IsSmi()) {
      // Right is a heap object.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, dst_, right_info_, entry_label());
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_);
    } else if (right_info_.IsSmi()) {
      // Left is a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_);
    } else {
      // Here we don't know if it's one or both that is a heap object.
      Label only_right_is_heap_object, got_both;
      __ mov(dst_, Operand(left_));
      __ SmiUntag(dst_, &only_right_is_heap_object);
      // Left was a heap object.
      __ JumpIfNotNumber(left_, left_info_, entry_label());
      __ ConvertToInt32(dst_, left_, dst_, left_info_, entry_label());
      __ SmiUntag(right_, &got_both);
      // Both were heap objects.
      __ rcl(right_, 1);  // Put tag back.
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ jmp(&got_both);
      __ bind(&only_right_is_heap_object);
      __ JumpIfNotNumber(right_, right_info_, entry_label());
      __ ConvertToInt32(right_, right_, no_reg, right_info_, entry_label());
      __ bind(&got_both);
    }
  }
  ASSERT(op_ == Token::BIT_AND ||
         op_ == Token::BIT_OR ||
         op_ == Token::BIT_XOR ||
         right_.is(ecx));
  switch (op_) {
    case Token::BIT_AND: __ and_(dst_, Operand(right_)); break;
    case Token::BIT_OR: __ or_(dst_, Operand(right_)); break;
    case Token::BIT_XOR: __ xor_(dst_, Operand(right_)); break;
    case Token::SHR: __ shr_cl(dst_); break;
    case Token::SAR: __ sar_cl(dst_); break;
    case Token::SHL: __ shl_cl(dst_); break;
    default: UNREACHABLE();
  }
  if (op_ == Token::SHR) {
    // Check that the *unsigned* result fits in a smi. Neither of
    // the two high-order bits can be set:
    //  * 0x80000000: high bit would be lost when smi tagging.
    //  * 0x40000000: this number would convert to negative when smi
    //    tagging.
    __ test(dst_, Immediate(0xc0000000));
    __ j(not_zero, &answer_out_of_range_);
  } else {
    // Check that the *signed* result fits in a smi.
    __ cmp(dst_, 0xc0000000);
    __ j(negative, &answer_out_of_range_);
  }
  __ SmiTag(dst_);
  Exit();
}


void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() {
  Label after_alloc_failure2;
  Label allocation_ok;
  __ bind(&after_alloc_failure2);
  // We have to allocate a number, causing a GC, while keeping hold of
  // the answer in dst_. The answer is not a Smi. We can't just call the
  // runtime shift function here because we already threw away the inputs.
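  // Split the untagged answer into two Smi-tagged words (the low 31 bits,
  // and the top bit as 0 or 0x80000000) so that both survive the GC.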
1319 __ xor_(left_, Operand(left_));
1320 __ shl(dst_, 1); // Put top bit in carry flag and Smi tag the low bits.
1321 __ rcr(left_, 1); // Rotate with carry.
1322 __ push(dst_); // Smi tagged low 31 bits.
1323 __ push(left_); // 0 or 0x80000000, which is Smi tagged in both cases.
1324 __ CallRuntime(Runtime::kNumberAlloc, 0);
1325 if (!left_.is(eax)) {
1326 __ mov(left_, eax);
1327 }
1328 __ pop(right_); // High bit.
1329 __ pop(dst_); // Low 31 bits.
1330 __ shr(dst_, 1); // Put 0 in top bit.
1331 __ or_(dst_, Operand(right_));
1332 __ jmp(&allocation_ok);
1333
1334 // This is the second entry point to the deferred code. It is used only by
1335 // the bit operations.
1336 // The dst_ register has the answer. It is not Smi tagged. If mode_ is
1337 // OVERWRITE_LEFT then left_ must contain either an overwritable heap number
1338 // or a Smi.
1339 // Put a heap number pointer in left_.
1340 __ bind(&answer_out_of_range_);
1341 SaveRegisters();
1342 if (mode_ == OVERWRITE_LEFT) {
1343 __ test(left_, Immediate(kSmiTagMask));
1344 __ j(not_zero, &allocation_ok);
1345 }
1346 // This trashes right_.
1347 __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
1348 __ bind(&allocation_ok);
1349 if (CpuFeatures::IsSupported(SSE2) && op_ != Token::SHR) {
1350 CpuFeatures::Scope use_sse2(SSE2);
1351 ASSERT(Token::IsBitOp(op_));
1352 // Signed conversion.
1353 __ cvtsi2sd(xmm0, Operand(dst_));
1354 __ movdbl(FieldOperand(left_, HeapNumber::kValueOffset), xmm0);
1355 } else {
1356 if (op_ == Token::SHR) {
1357 __ push(Immediate(0)); // High word of unsigned value.
1358 __ push(dst_);
1359 __ fild_d(Operand(esp, 0));
1360 __ Drop(2);
1361 } else {
1362 ASSERT(Token::IsBitOp(op_));
1363 __ push(dst_);
1364 __ fild_s(Operand(esp, 0)); // Signed conversion.
1365 __ pop(dst_);
1366 }
1367 __ fstp_d(FieldOperand(left_, HeapNumber::kValueOffset));
1368 }
1369 __ mov(dst_, left_);
1370 RestoreRegisters();
1371 Exit();
Steve Blocka7e24c12009-10-30 11:49:00 +00001372}
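

// Note on the shl/rcr sequence above: it splits the untagged 32-bit answer
// into two values that are each valid smis, so that both survive the GC that
// Runtime::kNumberAlloc may trigger.  shl(dst_, 1) smi-tags the low 31 bits
// and moves bit 31 into the carry flag; rcr(left_, 1) rotates that carry into
// bit 31 of the zeroed left_ register, producing either 0 or 0x80000000, both
// of which have a clear low bit and therefore look like smis to the GC.
// After the allocation the two halves are popped and recombined with shr/or_.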


static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
                                  Token::Value op,
                                  const Result& right,
                                  const Result& left) {
  // Set TypeInfo of result according to the operation performed.
  // Rely on the fact that smis have a 31 bit payload on ia32.
  STATIC_ASSERT(kSmiValueSize == 31);
  switch (op) {
    case Token::COMMA:
      return right.type_info();
    case Token::OR:
    case Token::AND:
      // Result type can be either of the two input types.
      return operands_type;
    case Token::BIT_AND: {
      // Anding with positive Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() >= 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
          Smi::cast(*left.handle())->value() >= 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_OR: {
      // Oring with negative Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() < 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
          Smi::cast(*left.handle())->value() < 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_XOR:
      // Result is always a 32 bit integer. Smi property of inputs is preserved.
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SAR:
      if (left.is_smi()) return TypeInfo::Smi();
      // Result is a smi if we shift by a constant >= 1, otherwise an integer32.
      // Shift amount is masked with 0x1F (ECMA standard 11.7.2).
      return (right.is_constant() && right.handle()->IsSmi()
              && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SHR:
      // Result is a smi if we shift by a constant >= 2, an integer32 if
      // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
      if (right.is_constant() && right.handle()->IsSmi()) {
        int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
        if (shift_amount > 1) {
          return TypeInfo::Smi();
        } else if (shift_amount > 0) {
          return TypeInfo::Integer32();
        }
      }
      return TypeInfo::Number();
    case Token::ADD:
      if (operands_type.IsSmi()) {
        // The Integer32 range is big enough to take the sum of any two Smis.
        return TypeInfo::Integer32();
      } else if (operands_type.IsNumber()) {
        return TypeInfo::Number();
      } else if (left.type_info().IsString() || right.type_info().IsString()) {
        return TypeInfo::String();
      } else {
        return TypeInfo::Unknown();
      }
    case Token::SHL:
      return TypeInfo::Integer32();
    case Token::SUB:
      // The Integer32 range is big enough to take the difference of any two
      // Smis.
      return (operands_type.IsSmi()) ?
             TypeInfo::Integer32() :
             TypeInfo::Number();
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      // Result is always a number.
      return TypeInfo::Number();
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return TypeInfo::Unknown();
}
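

// Example of the reasoning above: for BIT_AND with a non-negative smi
// constant c, x & c always lies in [0, c], inside the smi range whatever x
// is, so the result is typed Smi.  Dually, x | c for a negative smi constant
// c keeps the sign bit set and can only move the value toward -1, so the
// result stays in [c, -1] and is a Smi as well.  For SHR, a shift by two or
// more yields an unsigned value below 2^30, which always fits in a smi.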


void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          answer =
            frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
        }
      } else if (right_is_string) {
        answer =
          frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = stub.GenerateCall(masm_, frame_, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = stub.GenerateCall(masm_, frame_, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}
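

// Dispatch summary for the function above: two smi constants fold at compile
// time; a non-smi constant operand goes straight to the stub with no inline
// smi path; a single smi constant operand takes ConstantSmiBinaryOperation;
// everything else gets inline smi code only inside loops, where the smi fast
// path is most likely to pay off.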


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
        if (answer != 0 || (left >= 0 && right >= 0)) {
          answer_object = Smi::FromInt(static_cast<int>(answer));
        }
      }
    }
    break;
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
      int shift_amount = right & 0x1F;
      if (Smi::IsValid(left << shift_amount)) {
        answer_object = Smi::FromInt(left << shift_amount);
      }
      break;
    }
    case Token::SHR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      unsigned_left >>= shift_amount;
      if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
        answer_object = Smi::FromInt(unsigned_left);
      }
      break;
    }
    case Token::SAR: {
      int shift_amount = right & 0x1F;
      unsigned int unsigned_left = left;
      if (left < 0) {
        // Perform arithmetic shift of a negative number by
        // complementing number, logical shifting, complementing again.
        unsigned_left = ~unsigned_left;
        unsigned_left >>= shift_amount;
        unsigned_left = ~unsigned_left;
      } else {
        unsigned_left >>= shift_amount;
      }
      ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
      answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object == Heap::undefined_value()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}
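

// The SAR case above sidesteps implementation-defined behavior: in C++,
// right-shifting a negative signed integer is not guaranteed to be an
// arithmetic shift, so the shift is performed on the complemented unsigned
// value.  Worked example for left = -5, shift_amount = 1: ~(-5) = 4 as
// unsigned, 4 >> 1 = 2, and ~2 = -3, which is exactly -5 >> 1 with sign
// extension.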


void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
                                               Result* right,
                                               JumpTarget* both_smi) {
  TypeInfo left_info = left->type_info();
  TypeInfo right_info = right->type_info();
  if (left_info.IsDouble() || left_info.IsString() ||
      right_info.IsDouble() || right_info.IsString()) {
    // We know that left and right are not both smi.  Don't do any tests.
    return;
  }

  if (left->reg().is(right->reg())) {
    if (!left_info.IsSmi()) {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), left->reg());
      __ or_(temp.reg(), Operand(right->reg()));
      __ test(temp.reg(), Immediate(kSmiTagMask));
      temp.Unuse();
      both_smi->Branch(zero);
    } else {
      __ test(left->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    if (!right_info.IsSmi()) {
      __ test(right->reg(), Immediate(kSmiTagMask));
      both_smi->Branch(zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
      left->Unuse();
      right->Unuse();
      both_smi->Jump();
    }
  }
}
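

// The temp = left | right trick above tests both smi tags with one branch:
// kSmiTagMask is 1 and a smi has a clear low bit, so (left | right) & 1 is
// zero exactly when both low bits are zero, i.e. when both values are smis.
// A single heap object pointer (tagged with a set low bit) makes the test
// non-zero.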


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  DeferredCode* deferred) {
  JumpIfNotBothSmiUsingTypeInfo(left,
                                right,
                                scratch,
                                left_info,
                                right_info,
                                deferred->entry_label());
}


void CodeGenerator::JumpIfNotBothSmiUsingTypeInfo(Register left,
                                                  Register right,
                                                  Register scratch,
                                                  TypeInfo left_info,
                                                  TypeInfo right_info,
                                                  Label* on_not_smi) {
  if (left.is(right)) {
    if (!left_info.IsSmi()) {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left);
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      __ mov(scratch, left);
      __ or_(scratch, Operand(right));
      __ test(scratch, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      __ test(left, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left);
    if (!right_info.IsSmi()) {
      __ test(right, Immediate(kSmiTagMask));
      __ j(not_zero, on_not_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  }
}


// Implements a binary operation using a deferred code object and some
// inline code to operate on smis quickly.
Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
                                               Result* left,
                                               Result* right,
                                               OverwriteMode overwrite_mode) {
  // Copy the type info because left and right may be overwritten.
  TypeInfo left_type_info = left->type_info();
  TypeInfo right_type_info = right->type_info();
  Token::Value op = expr->op();
  Result answer;
  // Special handling of div and mod because they use fixed registers.
  if (op == Token::DIV || op == Token::MOD) {
    // We need eax as the quotient register, edx as the remainder
    // register, neither left nor right in eax or edx, and left copied
    // to eax.
    Result quotient;
    Result remainder;
    bool left_is_in_eax = false;
    // Step 1: get eax for quotient.
    if ((left->is_register() && left->reg().is(eax)) ||
        (right->is_register() && right->reg().is(eax))) {
      // One or both is in eax.  Use a fresh non-edx register for
      // them.
      Result fresh = allocator_->Allocate();
      ASSERT(fresh.is_valid());
      if (fresh.reg().is(edx)) {
        remainder = fresh;
        fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
      }
      if (left->is_register() && left->reg().is(eax)) {
        quotient = *left;
        *left = fresh;
        left_is_in_eax = true;
      }
      if (right->is_register() && right->reg().is(eax)) {
        quotient = *right;
        *right = fresh;
      }
      __ mov(fresh.reg(), eax);
    } else {
      // Neither left nor right is in eax.
      quotient = allocator_->Allocate(eax);
    }
    ASSERT(quotient.is_register() && quotient.reg().is(eax));
    ASSERT(!(left->is_register() && left->reg().is(eax)));
    ASSERT(!(right->is_register() && right->reg().is(eax)));

    // Step 2: get edx for remainder if necessary.
    if (!remainder.is_valid()) {
      if ((left->is_register() && left->reg().is(edx)) ||
          (right->is_register() && right->reg().is(edx))) {
        Result fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
        if (left->is_register() && left->reg().is(edx)) {
          remainder = *left;
          *left = fresh;
        }
        if (right->is_register() && right->reg().is(edx)) {
          remainder = *right;
          *right = fresh;
        }
        __ mov(fresh.reg(), edx);
      } else {
        // Neither left nor right is in edx.
        remainder = allocator_->Allocate(edx);
      }
    }
    ASSERT(remainder.is_register() && remainder.reg().is(edx));
    ASSERT(!(left->is_register() && left->reg().is(edx)));
    ASSERT(!(right->is_register() && right->reg().is(edx)));

    left->ToRegister();
    right->ToRegister();
    frame_->Spill(eax);
    frame_->Spill(edx);
    // DeferredInlineBinaryOperation requires all the registers that it is
    // told about to be spilled and distinct.
    Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);

    // Check that left and right are smi tagged.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          (op == Token::DIV) ? eax : edx,
                                          left->reg(),
                                          distinct_right.reg(),
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), edx,
                                  left_type_info, right_type_info, deferred);
    if (!left_is_in_eax) {
      __ mov(eax, left->reg());
    }
    // Sign extend eax into edx:eax.
    __ cdq();
    // Check for 0 divisor.
    __ test(right->reg(), Operand(right->reg()));
    deferred->Branch(zero);
    // Divide edx:eax by the right operand.
    __ idiv(right->reg());

    // Complete the operation.
    if (op == Token::DIV) {
      // Check for negative zero result.  If result is zero, and divisor
      // is negative, return a floating point negative zero.  The
      // virtual frame is unchanged in this block, so local control flow
      // can use a Label rather than a JumpTarget.  If the context of this
      // expression will treat -0 like 0, do not do this test.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(left->reg(), Operand(left->reg()));
        __ j(not_zero, &non_zero_result);
        __ test(right->reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by
      // idiv instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      deferred->Branch(equal);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      deferred->Branch(not_zero);
      // Tag the result and store it in the quotient register.
      __ SmiTag(eax);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = quotient;
    } else {
      ASSERT(op == Token::MOD);
      // Check for a negative zero result.  If the result is zero, and
      // the dividend is negative, return a floating point negative
      // zero.  The frame is unchanged in this block, so local control
      // flow can use a Label rather than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(edx, Operand(edx));
        __ j(not_zero, &non_zero_result, taken);
        __ test(left->reg(), Operand(left->reg()));
        deferred->Branch(negative);
        __ bind(&non_zero_result);
      }
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = remainder;
    }
    ASSERT(answer.is_valid());
    return answer;
  }

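  // Note on the DIV/MOD code above: idiv divides the 64-bit value edx:eax by
  // its operand, which is why eax and edx are claimed up front.  It operates
  // here on the still-tagged values; since a tagged smi is the payload
  // shifted left by one, the tags cancel in the quotient:
  // (a << 1) / (b << 1) == a / b.  The cmp(eax, 0x40000000) catches the one
  // overflow case, -2^30 / -1: the tagged inputs are 0x80000000 and
  // 0xfffffffe, idiv yields 0x40000000 (2^30, one past Smi::kMaxValue), and
  // idiv sets no arithmetic flags, so an explicit compare is the only check.
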
  // Special handling of shift operations because they use fixed
  // registers.
  if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
    // Move left out of ecx if necessary.
    if (left->is_register() && left->reg().is(ecx)) {
      *left = allocator_->Allocate();
      ASSERT(left->is_valid());
      __ mov(left->reg(), ecx);
    }
    right->ToRegister(ecx);
    left->ToRegister();
    ASSERT(left->is_register() && !left->reg().is(ecx));
    ASSERT(right->is_register() && right->reg().is(ecx));
    if (left_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
    }
    if (right_type_info.IsSmi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
    }

    // We will modify right, it must be spilled.
    frame_->Spill(ecx);
    // DeferredInlineBinaryOperation requires all the registers that it is told
    // about to be spilled and distinct.  We know that right is ecx and left is
    // not ecx.
    frame_->Spill(left->reg());

    // Use a fresh answer register to avoid spilling the left operand.
    answer = allocator_->Allocate();
    ASSERT(answer.is_valid());

    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          answer.reg(),
                                          left->reg(),
                                          ecx,
                                          left_type_info,
                                          right_type_info,
                                          overwrite_mode);
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());

    // Untag both operands.
    __ mov(answer.reg(), left->reg());
    __ SmiUntag(answer.reg());
    __ SmiUntag(right->reg());  // Right is ecx.

    // Perform the operation.
    ASSERT(right->reg().is(ecx));
    switch (op) {
      case Token::SAR: {
        __ sar_cl(answer.reg());
        if (!left_type_info.IsSmi()) {
          // Check that the *signed* result fits in a smi.
          __ cmp(answer.reg(), 0xc0000000);
          deferred->JumpToAnswerOutOfRange(negative);
        }
        break;
      }
      case Token::SHR: {
        __ shr_cl(answer.reg());
        // Check that the *unsigned* result fits in a smi.  Neither of
        // the two high-order bits can be set:
        //  * 0x80000000: high bit would be lost when smi tagging.
        //  * 0x40000000: this number would convert to negative when smi
        //    tagging.
        // These two cases can only happen with shifts by 0 or 1 when
        // handed a valid smi.  If the answer cannot be represented by a
        // smi, restore the left and right arguments, and jump to slow
        // case.  The low bit of the left argument may be lost, but only
        // in a case where it is dropped anyway.
        __ test(answer.reg(), Immediate(0xc0000000));
        deferred->JumpToAnswerOutOfRange(not_zero);
        break;
      }
      case Token::SHL: {
        __ shl_cl(answer.reg());
        // Check that the *signed* result fits in a smi.
        __ cmp(answer.reg(), 0xc0000000);
        deferred->JumpToAnswerOutOfRange(negative);
        break;
      }
      default:
        UNREACHABLE();
    }
    // Smi-tag the result in answer.
    __ SmiTag(answer.reg());
    deferred->BindExit();
    left->Unuse();
    right->Unuse();
    ASSERT(answer.is_valid());
    return answer;
  }

  // Handle the other binary operations.
  left->ToRegister();
  right->ToRegister();
  // DeferredInlineBinaryOperation requires all the registers that it is told
  // about to be spilled.
  Result distinct_right = frame_->MakeDistinctAndSpilled(left, right);
  // A newly allocated register answer is used to hold the answer.  The
  // registers containing left and right are not modified so they don't
  // need to be spilled in the fast case.
  answer = allocator_->Allocate();
  ASSERT(answer.is_valid());

  // Perform the smi tag check.
  DeferredInlineBinaryOperation* deferred =
      new DeferredInlineBinaryOperation(op,
                                        answer.reg(),
                                        left->reg(),
                                        distinct_right.reg(),
                                        left_type_info,
                                        right_type_info,
                                        overwrite_mode);
  Label non_smi_bit_op;
  if (op != Token::BIT_OR) {
    JumpIfNotBothSmiUsingTypeInfo(left->reg(), right->reg(), answer.reg(),
                                  left_type_info, right_type_info,
                                  deferred->NonSmiInputLabel());
  }

  __ mov(answer.reg(), left->reg());
  switch (op) {
    case Token::ADD:
      __ add(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::SUB:
      __ sub(answer.reg(), Operand(right->reg()));
      deferred->Branch(overflow);
      break;

    case Token::MUL: {
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // Remove smi tag from the left operand (but keep sign).
      // Left-hand operand has been copied into answer.
      __ SmiUntag(answer.reg());
      // Do multiplication of smis, leaving result in answer.
      __ imul(answer.reg(), Operand(right->reg()));
      // Go slow on overflows.
      deferred->Branch(overflow);
      // Check for negative zero result.  If product is zero, and one
      // argument is negative, go to slow case.  The frame is unchanged
      // in this block, so local control flow can use a Label rather
      // than a JumpTarget.
      if (!expr->no_negative_zero()) {
        Label non_zero_result;
        __ test(answer.reg(), Operand(answer.reg()));
        __ j(not_zero, &non_zero_result, taken);
        __ mov(answer.reg(), left->reg());
        __ or_(answer.reg(), Operand(right->reg()));
        deferred->Branch(negative);
        __ xor_(answer.reg(), Operand(answer.reg()));  // Positive 0 is correct.
        __ bind(&non_zero_result);
      }
      break;
    }

    case Token::BIT_OR:
      __ or_(answer.reg(), Operand(right->reg()));
      __ test(answer.reg(), Immediate(kSmiTagMask));
      __ j(not_zero, deferred->NonSmiInputLabel());
      break;

    case Token::BIT_AND:
      __ and_(answer.reg(), Operand(right->reg()));
      break;

    case Token::BIT_XOR:
      __ xor_(answer.reg(), Operand(right->reg()));
      break;

    default:
      UNREACHABLE();
      break;
  }

  deferred->BindExit();
  left->Unuse();
  right->Unuse();
  ASSERT(answer.is_valid());
  return answer;
}
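

// Note on the MUL case above: ECMA-262 requires a product like -1 * 0 to be
// -0.0, which a smi cannot represent (smi zero is +0).  The inline code
// therefore ORs the operands whenever the product is zero; if either operand
// was negative the sign bit of the OR result is set, and the deferred code
// recomputes the result as a heap number instead.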


// Call the appropriate binary operation stub to compute src op value
// and leave the result in dst.
class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             Register dst,
                             Register src,
                             TypeInfo type_info,
                             Smi* value,
                             OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        src_(src),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register src_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperation::Generate() {
  // For mod we don't generate all the Smi code inline.
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, src_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// Call the appropriate binary operation stub to compute value op src
// and leave the result in dst.
class DeferredInlineSmiOperationReversed: public DeferredCode {
 public:
  DeferredInlineSmiOperationReversed(Token::Value op,
                                     Register dst,
                                     Smi* value,
                                     Register src,
                                     TypeInfo type_info,
                                     OverwriteMode overwrite_mode)
      : op_(op),
        dst_(dst),
        type_info_(type_info),
        value_(value),
        src_(src),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiOperationReversed");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  Register src_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiOperationReversed::Generate() {
  GenericBinaryOpStub stub(
      op_,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  stub.GenerateCall(masm_, value_, src_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src + value is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative addition and call the appropriate
// specialized stub for add.  The result is left in dst.
class DeferredInlineSmiAdd: public DeferredCode {
 public:
  DeferredInlineSmiAdd(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiAdd");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAdd::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of value + src is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative addition and call the appropriate
// specialized stub for add.  The result is left in dst.
class DeferredInlineSmiAddReversed: public DeferredCode {
 public:
  DeferredInlineSmiAddReversed(Register dst,
                               TypeInfo type_info,
                               Smi* value,
                               OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    set_comment("[ DeferredInlineSmiAddReversed");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiAddReversed::Generate() {
  // Undo the optimistic add operation and call the shared stub.
  __ sub(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::ADD,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, value_, dst_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The result of src - value is in dst.  It either overflowed or was not
// smi tagged.  Undo the speculative subtraction and call the
// appropriate specialized stub for subtract.  The result is left in
// dst.
class DeferredInlineSmiSub: public DeferredCode {
 public:
  DeferredInlineSmiSub(Register dst,
                       TypeInfo type_info,
                       Smi* value,
                       OverwriteMode overwrite_mode)
      : dst_(dst),
        type_info_(type_info),
        value_(value),
        overwrite_mode_(overwrite_mode) {
    if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
    set_comment("[ DeferredInlineSmiSub");
  }

  virtual void Generate();

 private:
  Register dst_;
  TypeInfo type_info_;
  Smi* value_;
  OverwriteMode overwrite_mode_;
};


void DeferredInlineSmiSub::Generate() {
  // Undo the optimistic sub operation and call the shared stub.
  __ add(Operand(dst_), Immediate(value_));
  GenericBinaryOpStub igostub(
      Token::SUB,
      overwrite_mode_,
      NO_SMI_CODE_IN_STUB,
      TypeInfo::Combine(TypeInfo::Smi(), type_info_));
  igostub.GenerateCall(masm_, dst_, value_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
}
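

// The deferred classes above implement an optimistic pattern: the inline code
// performs the smi add or sub first and checks the overflow and tag bits
// afterwards.  On the slow path dst holds the clobbered value, so the
// deferred code must undo the speculative operation (subtract or add the
// immediate back) before handing the original operand to the stub.  For
// example, for x + 3 where x turns out to be a heap number pointer, dst
// holds that pointer plus the tagged immediate, and DeferredInlineSmiAdd
// first subtracts Smi::FromInt(3) to recover x.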


Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
                                                 Result* operand,
                                                 Handle<Object> value,
                                                 bool reversed,
                                                 OverwriteMode overwrite_mode) {
  // Generate inline code for a binary operation when one of the
  // operands is a constant smi.  Consumes the argument "operand".
  if (IsUnsafeSmi(value)) {
    Result unsafe_operand(value);
    if (reversed) {
      return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
                                      overwrite_mode);
    } else {
      return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
                                      overwrite_mode);
    }
  }

  // Get the literal value.
  Smi* smi_value = Smi::cast(*value);
  int int_value = smi_value->value();

  Token::Value op = expr->op();
  Result answer;
  switch (op) {
    case Token::ADD: {
      operand->ToRegister();
      frame_->Spill(operand->reg());

      // Optimistically add.  Call the specialized add stub if the
      // result is not a smi or overflows.
      DeferredCode* deferred = NULL;
      if (reversed) {
        deferred = new DeferredInlineSmiAddReversed(operand->reg(),
                                                    operand->type_info(),
                                                    smi_value,
                                                    overwrite_mode);
      } else {
        deferred = new DeferredInlineSmiAdd(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
      }
      __ add(Operand(operand->reg()), Immediate(value));
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred = NULL;
      if (reversed) {
        // The reversed case is only hit when the right operand is not a
        // constant.
        ASSERT(operand->is_register());
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        __ Set(answer.reg(), Immediate(value));
        deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   operand->reg(),
                                                   operand->type_info(),
                                                   overwrite_mode);
        __ sub(answer.reg(), Operand(operand->reg()));
      } else {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        answer = *operand;
        deferred = new DeferredInlineSmiSub(operand->reg(),
                                            operand->type_info(),
                                            smi_value,
                                            overwrite_mode);
        __ sub(Operand(operand->reg()), Immediate(value));
      }
      deferred->Branch(overflow);
      if (!operand->type_info().IsSmi()) {
        __ test(answer.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      deferred->BindExit();
      operand->Unuse();
      break;
    }

    case Token::SAR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        frame_->Spill(operand->reg());
        if (!operand->type_info().IsSmi()) {
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
          deferred->BindExit();
        } else {
          if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          if (shift_value > 0) {
            __ sar(operand->reg(), shift_value);
            __ and_(operand->reg(), ~kSmiTagMask);
          }
        }
        answer = *operand;
      }
      break;

    case Token::SHR:
      if (reversed) {
        Result constant_operand(value);
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        answer = allocator()->Allocate();
        ASSERT(answer.is_valid());
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           answer.reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        if (!operand->type_info().IsSmi()) {
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(operand->reg());
        }
        __ mov(answer.reg(), operand->reg());
        __ SmiUntag(answer.reg());
        __ shr(answer.reg(), shift_value);
        // A negative Smi shifted right by two or more bits is in the
        // positive Smi range; for smaller shifts the result must be checked.
        if (shift_value < 2) {
          __ test(answer.reg(), Immediate(0xc0000000));
          deferred->Branch(not_zero);
        }
        operand->Unuse();
        __ SmiTag(answer.reg());
        deferred->BindExit();
      }
      break;

    case Token::SHL:
      if (reversed) {
        // Move operand into ecx and also into a second register.
        // If operand is already in a register, take advantage of that.
        // This lets us modify ecx, but still bail out to deferred code.
        Result right;
        Result right_copy_in_ecx;
        TypeInfo right_type_info = operand->type_info();
        operand->ToRegister();
        if (operand->reg().is(ecx)) {
          right = allocator()->Allocate();
          __ mov(right.reg(), ecx);
          frame_->Spill(ecx);
          right_copy_in_ecx = *operand;
        } else {
          right_copy_in_ecx = allocator()->Allocate(ecx);
          __ mov(ecx, operand->reg());
          right = *operand;
        }
        operand->Unuse();

        answer = allocator()->Allocate();
        DeferredInlineSmiOperationReversed* deferred =
            new DeferredInlineSmiOperationReversed(op,
                                                   answer.reg(),
                                                   smi_value,
                                                   right.reg(),
                                                   right_type_info,
                                                   overwrite_mode);
        __ mov(answer.reg(), Immediate(int_value));
        __ sar(ecx, kSmiTagSize);
        if (!right_type_info.IsSmi()) {
          deferred->Branch(carry);
        } else if (FLAG_debug_code) {
          __ AbortIfNotSmi(right.reg());
        }
        __ shl_cl(answer.reg());
        __ cmp(answer.reg(), 0xc0000000);
        deferred->Branch(sign);
        __ SmiTag(answer.reg());

        deferred->BindExit();
      } else {
        // Only the least significant 5 bits of the shift value are used.
        // In the slow case, this masking is done inside the runtime call.
        int shift_value = int_value & 0x1f;
        operand->ToRegister();
        if (shift_value == 0) {
          // Spill operand so it can be overwritten in the slow case.
          frame_->Spill(operand->reg());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             operand->reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          __ test(operand->reg(), Immediate(kSmiTagMask));
          deferred->Branch(not_zero);
          deferred->BindExit();
          answer = *operand;
        } else {
          // Use a fresh temporary for nonzero shift values.
          answer = allocator()->Allocate();
          ASSERT(answer.is_valid());
          DeferredInlineSmiOperation* deferred =
              new DeferredInlineSmiOperation(op,
                                             answer.reg(),
                                             operand->reg(),
                                             operand->type_info(),
                                             smi_value,
                                             overwrite_mode);
          if (!operand->type_info().IsSmi()) {
            __ test(operand->reg(), Immediate(kSmiTagMask));
            deferred->Branch(not_zero);
          } else if (FLAG_debug_code) {
            __ AbortIfNotSmi(operand->reg());
          }
          __ mov(answer.reg(), operand->reg());
          STATIC_ASSERT(kSmiTag == 0);  // adjust code if not the case
          // We do no shifts, only the Smi conversion, if shift_value is 1.
          if (shift_value > 1) {
            __ shl(answer.reg(), shift_value - 1);
          }
          // Convert int result to Smi, checking that it is in int range.
          STATIC_ASSERT(kSmiTagSize == 1);  // adjust code if not the case
          __ add(answer.reg(), Operand(answer.reg()));
          deferred->Branch(overflow);
          deferred->BindExit();
          operand->Unuse();
        }
      }
      break;

    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      operand->ToRegister();
      // DeferredInlineBinaryOperation requires all the registers that it is
      // told about to be spilled.
      frame_->Spill(operand->reg());
      DeferredInlineBinaryOperation* deferred = NULL;
      if (!operand->type_info().IsSmi()) {
        Result left = allocator()->Allocate();
        ASSERT(left.is_valid());
        Result right = allocator()->Allocate();
        ASSERT(right.is_valid());
        deferred = new DeferredInlineBinaryOperation(
            op,
            operand->reg(),
            left.reg(),
            right.reg(),
            operand->type_info(),
            TypeInfo::Smi(),
            overwrite_mode == NO_OVERWRITE ? NO_OVERWRITE : OVERWRITE_LEFT);
        __ test(operand->reg(), Immediate(kSmiTagMask));
        deferred->JumpToConstantRhs(not_zero, smi_value);
      } else if (FLAG_debug_code) {
        __ AbortIfNotSmi(operand->reg());
      }
      if (op == Token::BIT_AND) {
        __ and_(Operand(operand->reg()), Immediate(value));
      } else if (op == Token::BIT_XOR) {
        if (int_value != 0) {
          __ xor_(Operand(operand->reg()), Immediate(value));
        }
      } else {
        ASSERT(op == Token::BIT_OR);
        if (int_value != 0) {
          __ or_(Operand(operand->reg()), Immediate(value));
        }
      }
      if (deferred != NULL) deferred->BindExit();
      answer = *operand;
      break;
    }

    case Token::DIV:
      if (!reversed && int_value == 2) {
        operand->ToRegister();
        frame_->Spill(operand->reg());

        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check that lowest log2(value) bits of operand are zero, and test
        // smi tag at the same time.
        STATIC_ASSERT(kSmiTag == 0);
        STATIC_ASSERT(kSmiTagSize == 1);
        __ test(operand->reg(), Immediate(3));
        deferred->Branch(not_zero);  // Branch if non-smi or odd smi.
        __ sar(operand->reg(), 1);
        deferred->BindExit();
        answer = *operand;
      } else {
        // Cannot fall through MOD to default case, so we duplicate the
        // default case here.
        Result constant_operand(value);
        if (reversed) {
          answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                            overwrite_mode);
        } else {
          answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                            overwrite_mode);
        }
      }
      break;
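
    // Note on the DIV-by-2 fast path above: test(operand, Immediate(3))
    // checks the smi tag bit and the lowest payload bit with one instruction,
    // branching for non-smis and odd smis alike.  For an even smi v the
    // tagged value is 2v, and sar by one turns it into v, which is exactly
    // the tagged representation of v / 2.  Example: v = 6 is tagged as 12,
    // and 12 >> 1 == 6, the tagged form of 3.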

    // Generate inline code for mod of powers of 2 and negative powers of 2.
    case Token::MOD:
      if (!reversed &&
          int_value != 0 &&
          (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
        operand->ToRegister();
        frame_->Spill(operand->reg());
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op,
                                           operand->reg(),
                                           operand->reg(),
                                           operand->type_info(),
                                           smi_value,
                                           overwrite_mode);
        // Check for negative or non-Smi left hand side.
        __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
        deferred->Branch(not_zero);
        if (int_value < 0) int_value = -int_value;
        if (int_value == 1) {
          __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
        } else {
          __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
        }
        deferred->BindExit();
        answer = *operand;
        break;
      }
      // Fall through if we did not find a power of 2 on the right hand side!
      // The next case must be the default.

    default: {
      Result constant_operand(value);
      if (reversed) {
        answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
                                          overwrite_mode);
      } else {
        answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
                                          overwrite_mode);
      }
      break;
    }
  }
  ASSERT(answer.is_valid());
  return answer;
}
2698
2699
Leon Clarkee46be812010-01-19 14:06:41 +00002700static bool CouldBeNaN(const Result& result) {
Steve Block6ded16b2010-05-10 14:33:55 +01002701 if (result.type_info().IsSmi()) return false;
2702 if (result.type_info().IsInteger32()) return false;
Leon Clarkee46be812010-01-19 14:06:41 +00002703 if (!result.is_constant()) return true;
2704 if (!result.handle()->IsHeapNumber()) return false;
2705 return isnan(HeapNumber::cast(*result.handle())->value());
2706}


// Convert from signed to unsigned comparison to match the way EFLAGS are set
// by FPU and XMM compare instructions.
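// (Both the FPU and the XMM compares report 'less' in the carry flag and
// 'equal' in the zero flag, the same encoding an unsigned integer 'cmp'
// produces, which is why the unsigned condition codes apply.)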
static Condition DoubleCondition(Condition cc) {
  switch (cc) {
    case less: return below;
    case equal: return equal;
    case less_equal: return below_equal;
    case greater: return above;
    case greater_equal: return above_equal;
    default: UNREACHABLE();
  }
  UNREACHABLE();
  return equal;
}

void CodeGenerator::Comparison(AstNode* node,
                               Condition cc,
                               bool strict,
                               ControlDestination* dest) {
  // Strict only makes sense for equality comparisons.
  ASSERT(!strict || cc == equal);

  Result left_side;
  Result right_side;
  // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
  if (cc == greater || cc == less_equal) {
    cc = ReverseCondition(cc);
    left_side = frame_->Pop();
    right_side = frame_->Pop();
  } else {
    right_side = frame_->Pop();
    left_side = frame_->Pop();
  }
  ASSERT(cc == less || cc == equal || cc == greater_equal);
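  // For example, 'a > b' is compiled as 'b < a': the operands are swapped
  // as they are popped and the condition is reversed, so the test computes
  // the same result while the operands keep their original, left-to-right
  // conversion order.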

  // If either side is a constant smi, optimize the comparison.
  bool left_side_constant_smi = false;
  bool left_side_constant_null = false;
  bool left_side_constant_1_char_string = false;
  if (left_side.is_constant()) {
    left_side_constant_smi = left_side.handle()->IsSmi();
    left_side_constant_null = left_side.handle()->IsNull();
    left_side_constant_1_char_string =
        (left_side.handle()->IsString() &&
         String::cast(*left_side.handle())->length() == 1 &&
         String::cast(*left_side.handle())->IsAsciiRepresentation());
  }
  bool right_side_constant_smi = false;
  bool right_side_constant_null = false;
  bool right_side_constant_1_char_string = false;
  if (right_side.is_constant()) {
    right_side_constant_smi = right_side.handle()->IsSmi();
    right_side_constant_null = right_side.handle()->IsNull();
    right_side_constant_1_char_string =
        (right_side.handle()->IsString() &&
         String::cast(*right_side.handle())->length() == 1 &&
         String::cast(*right_side.handle())->IsAsciiRepresentation());
  }

  if (left_side_constant_smi || right_side_constant_smi) {
    bool is_loop_condition = (node->AsExpression() != NULL) &&
        node->AsExpression()->is_loop_condition();
    ConstantSmiComparison(cc, strict, dest, &left_side, &right_side,
                          left_side_constant_smi, right_side_constant_smi,
                          is_loop_condition);
  } else if (cc == equal &&
             (left_side_constant_null || right_side_constant_null)) {
    // To make null checks efficient, we check if either the left side or
    // the right side is the constant 'null'.
    // If so, we optimize the code by inlining a null check instead of
    // calling the (very) general runtime routine for checking equality.
    Result operand = left_side_constant_null ? right_side : left_side;
    right_side.Unuse();
    left_side.Unuse();
    operand.ToRegister();
    __ cmp(operand.reg(), Factory::null_value());
    if (strict) {
      operand.Unuse();
      dest->Split(equal);
    } else {
      // The 'null' value is only equal to 'undefined' if using non-strict
      // comparisons.
      dest->true_target()->Branch(equal);
      __ cmp(operand.reg(), Factory::undefined_value());
      dest->true_target()->Branch(equal);
      __ test(operand.reg(), Immediate(kSmiTagMask));
      dest->false_target()->Branch(equal);

      // It can be an undetectable object.
      // Use a scratch register in preference to spilling operand.reg().
      Result temp = allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(),
             FieldOperand(operand.reg(), HeapObject::kMapOffset));
      __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
                1 << Map::kIsUndetectable);
      temp.Unuse();
      operand.Unuse();
      dest->Split(not_zero);
    }
  } else if (left_side_constant_1_char_string ||
             right_side_constant_1_char_string) {
    if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
      // Trivial case, comparing two constants.
      int left_value = String::cast(*left_side.handle())->Get(0);
      int right_value = String::cast(*right_side.handle())->Get(0);
      switch (cc) {
        case less:
          dest->Goto(left_value < right_value);
          break;
        case equal:
          dest->Goto(left_value == right_value);
          break;
        case greater_equal:
          dest->Goto(left_value >= right_value);
          break;
        default:
          UNREACHABLE();
      }
    } else {
      // Only one side is a constant 1-character string.
      // If the left side is the constant, reverse the operands.
      // Since one side is a constant string, conversion order does not matter.
      if (left_side_constant_1_char_string) {
        Result temp = left_side;
        left_side = right_side;
        right_side = temp;
        cc = ReverseCondition(cc);
        // This may reintroduce greater or less_equal as the value of cc.
        // CompareStub and the inline code both support all values of cc.
      }
      // Implement comparison against a constant string, inlining the case
      // where both sides are strings.
      left_side.ToRegister();

      // Here we split control flow to the stub call and inlined cases
      // before finally splitting it to the control destination.  We use
      // a jump target and branching to duplicate the virtual frame at
      // the first split.  We manually handle the off-frame references
      // by reconstituting them on the non-fall-through path.
      JumpTarget is_not_string, is_string;
      Register left_reg = left_side.reg();
      Handle<Object> right_val = right_side.handle();
      ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
      __ test(left_side.reg(), Immediate(kSmiTagMask));
      is_not_string.Branch(zero, &left_side);
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(),
             FieldOperand(left_side.reg(), HeapObject::kMapOffset));
      __ movzx_b(temp.reg(),
                 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
      // If we are testing for equality then make use of the symbol shortcut.
      // Check if the left hand side has the same type as the right hand
      // side (which is always a symbol).
      if (cc == equal) {
        Label not_a_symbol;
        STATIC_ASSERT(kSymbolTag != 0);
        // Ensure that no non-strings have the symbol bit set.
        STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
        __ test(temp.reg(), Immediate(kIsSymbolMask));  // Test the symbol bit.
        __ j(zero, &not_a_symbol);
        // They are symbols, so do identity compare.
        __ cmp(left_side.reg(), right_side.handle());
        dest->true_target()->Branch(equal);
        dest->false_target()->Branch(not_equal);
        __ bind(&not_a_symbol);
      }
      // Call the compare stub if the left side is not a flat ASCII string.
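      // The mask-and-compare below checks three properties of the instance
      // type in one go: string-ness (the kIsNotStringMask bits must equal
      // kStringTag), sequential representation (the kStringRepresentationMask
      // bits must equal kSeqStringTag), and ASCII encoding (the
      // kStringEncodingMask bits must equal kAsciiStringTag).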
      __ and_(temp.reg(),
          kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
      __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
      temp.Unuse();
      is_string.Branch(equal, &left_side);

      // Set up and call the compare stub.
      is_not_string.Bind(&left_side);
      CompareStub stub(cc, strict, kCantBothBeNaN);
      Result result = frame_->CallStub(&stub, &left_side, &right_side);
      result.ToRegister();
      __ cmp(result.reg(), 0);
      result.Unuse();
      dest->true_target()->Branch(cc);
      dest->false_target()->Jump();

      is_string.Bind(&left_side);
      // left_side is a sequential ASCII string.
      left_side = Result(left_reg);
      right_side = Result(right_val);
      // Test string equality and comparison.
      Label comparison_done;
      if (cc == equal) {
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
        __ j(not_equal, &comparison_done);
        uint8_t char_value =
            static_cast<uint8_t>(String::cast(*right_val)->Get(0));
        __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
                char_value);
      } else {
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
        // If the length is 0 then the jump is taken and the flags
        // correctly represent being less than the one-character string.
        __ j(below, &comparison_done);
        // Compare the first character of the string with the
        // constant 1-character string.
        uint8_t char_value =
            static_cast<uint8_t>(String::cast(*right_val)->Get(0));
        __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
                char_value);
        __ j(not_equal, &comparison_done);
        // If the first character is the same then the long string sorts after
        // the short one.
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
      }
      __ bind(&comparison_done);
      left_side.Unuse();
      right_side.Unuse();
      dest->Split(cc);
    }
  } else {
    // Neither side is a constant Smi, constant 1-char string, or constant
    // null.  If either side is a non-smi constant, or is known to be a heap
    // number, skip the smi check.
    bool known_non_smi =
        (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
        (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
        left_side.type_info().IsDouble() ||
        right_side.type_info().IsDouble();

    NaNInformation nan_info =
        (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
        kBothCouldBeNaN :
        kCantBothBeNaN;

    // Inline number comparison handling any combination of smis and heap
    // numbers if:
    //   - the code is in a loop,
    //   - the compare operation is different from equal, and
    //   - the comparison is not a for-loop condition.
    // The reason for excluding equal is that it will most likely be done
    // with smis (not heap numbers), and the code for comparing smis is
    // inlined separately.  The same reasoning applies to for-loop
    // conditions, which will also most likely be smi comparisons.
    bool is_loop_condition = (node->AsExpression() != NULL)
        && node->AsExpression()->is_loop_condition();
    bool inline_number_compare =
        loop_nesting() > 0 && cc != equal && !is_loop_condition;

    // Left and right needed in registers for the following code.
    left_side.ToRegister();
    right_side.ToRegister();

    if (known_non_smi) {
      // Inlined equality check:
      // If at least one of the objects is not NaN, then if the objects
      // are identical, they are equal.
      if (nan_info == kCantBothBeNaN && cc == equal) {
        __ cmp(left_side.reg(), Operand(right_side.reg()));
        dest->true_target()->Branch(equal);
      }

      // Inlined number comparison:
      if (inline_number_compare) {
        GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
      }

      // End of in-line compare, call out to the compare stub.  Don't include
      // number comparison in the stub if it was inlined.
      CompareStub stub(cc, strict, nan_info, !inline_number_compare);
      Result answer = frame_->CallStub(&stub, &left_side, &right_side);
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      dest->Split(cc);
    } else {
      // Here we split control flow to the stub call and inlined cases
      // before finally splitting it to the control destination.  We use
      // a jump target and branching to duplicate the virtual frame at
      // the first split.  We manually handle the off-frame references
      // by reconstituting them on the non-fall-through path.
      JumpTarget is_smi;
      Register left_reg = left_side.reg();
      Register right_reg = right_side.reg();

      // In-line check for comparing two smis.
      JumpIfBothSmiUsingTypeInfo(&left_side, &right_side, &is_smi);

      if (has_valid_frame()) {
        // Inline the equality check if both operands can't be a NaN.  If
        // both objects are the same they are equal.
        if (nan_info == kCantBothBeNaN && cc == equal) {
          __ cmp(left_side.reg(), Operand(right_side.reg()));
          dest->true_target()->Branch(equal);
        }

        // Inlined number comparison:
        if (inline_number_compare) {
          GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
        }

        // End of in-line compare, call out to the compare stub.  Don't
        // include number comparison in the stub if it was inlined.
        CompareStub stub(cc, strict, nan_info, !inline_number_compare);
        Result answer = frame_->CallStub(&stub, &left_side, &right_side);
        __ test(answer.reg(), Operand(answer.reg()));
        answer.Unuse();
        if (is_smi.is_linked()) {
          dest->true_target()->Branch(cc);
          dest->false_target()->Jump();
        } else {
          dest->Split(cc);
        }
      }

      if (is_smi.is_linked()) {
        is_smi.Bind();
        left_side = Result(left_reg);
        right_side = Result(right_reg);
        __ cmp(left_side.reg(), Operand(right_side.reg()));
        right_side.Unuse();
        left_side.Unuse();
        dest->Split(cc);
      }
    }
  }
}


void CodeGenerator::ConstantSmiComparison(Condition cc,
                                          bool strict,
                                          ControlDestination* dest,
                                          Result* left_side,
                                          Result* right_side,
                                          bool left_side_constant_smi,
                                          bool right_side_constant_smi,
                                          bool is_loop_condition) {
  if (left_side_constant_smi && right_side_constant_smi) {
    // Trivial case, comparing two constants.
    int left_value = Smi::cast(*left_side->handle())->value();
    int right_value = Smi::cast(*right_side->handle())->value();
    switch (cc) {
      case less:
        dest->Goto(left_value < right_value);
        break;
      case equal:
        dest->Goto(left_value == right_value);
        break;
      case greater_equal:
        dest->Goto(left_value >= right_value);
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // Only one side is a constant Smi.
    // If left side is a constant Smi, reverse the operands.
    // Since one side is a constant Smi, conversion order does not matter.
    if (left_side_constant_smi) {
      Result* temp = left_side;
      left_side = right_side;
      right_side = temp;
      cc = ReverseCondition(cc);
      // This may re-introduce greater or less_equal as the value of cc.
      // CompareStub and the inline code both support all values of cc.
    }
    // Implement comparison against a constant Smi, inlining the case
    // where both sides are Smis.
    left_side->ToRegister();
    Register left_reg = left_side->reg();
    Handle<Object> right_val = right_side->handle();

    if (left_side->is_smi()) {
      if (FLAG_debug_code) {
        __ AbortIfNotSmi(left_reg);
      }
      // Test smi equality and comparison by signed int comparison.
      if (IsUnsafeSmi(right_side->handle())) {
        right_side->ToRegister();
        __ cmp(left_reg, Operand(right_side->reg()));
      } else {
        __ cmp(Operand(left_reg), Immediate(right_side->handle()));
      }
      left_side->Unuse();
      right_side->Unuse();
      dest->Split(cc);
    } else {
      // Only the case where the left side could possibly be a non-smi is
      // left.
      JumpTarget is_smi;
      if (cc == equal) {
        // We can do the equality comparison before the smi check.
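        // (A bitwise compare against the smi immediate can only succeed if
        // the operand is that exact smi; any heap pointer has the tag bit
        // set and compares unequal, so the true path needs no tag check.)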
        __ cmp(Operand(left_reg), Immediate(right_side->handle()));
        dest->true_target()->Branch(equal);
        __ test(left_reg, Immediate(kSmiTagMask));
        dest->false_target()->Branch(zero);
      } else {
        // Do the smi check, then the comparison.
        __ test(left_reg, Immediate(kSmiTagMask));
        is_smi.Branch(zero, left_side, right_side);
      }

      // Jump or fall through to here if we are comparing a non-smi to a
      // constant smi.  If the non-smi is a heap number and this is not
      // a loop condition, inline the floating point code.
      if (!is_loop_condition && CpuFeatures::IsSupported(SSE2)) {
        // Right side is a constant smi and left side has been checked
        // not to be a smi.
        CpuFeatures::Scope use_sse2(SSE2);
        JumpTarget not_number;
        __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
               Immediate(Factory::heap_number_map()));
        not_number.Branch(not_equal, left_side);
        __ movdbl(xmm1,
                  FieldOperand(left_reg, HeapNumber::kValueOffset));
        int value = Smi::cast(*right_val)->value();
        if (value == 0) {
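          // xorpd of a register with itself is the usual idiom for loading
          // +0.0: it needs no memory constant and breaks any dependency on
          // the register's previous contents.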
          __ xorpd(xmm0, xmm0);
        } else {
          Result temp = allocator()->Allocate();
          __ mov(temp.reg(), Immediate(value));
          __ cvtsi2sd(xmm0, Operand(temp.reg()));
          temp.Unuse();
        }
        __ ucomisd(xmm1, xmm0);
        // Jump to builtin for NaN.
        not_number.Branch(parity_even, left_side);
        left_side->Unuse();
        dest->true_target()->Branch(DoubleCondition(cc));
        dest->false_target()->Jump();
        not_number.Bind(left_side);
      }

      // Set up and call the compare stub.
      CompareStub stub(cc, strict, kCantBothBeNaN);
      Result result = frame_->CallStub(&stub, left_side, right_side);
      result.ToRegister();
      __ test(result.reg(), Operand(result.reg()));
      result.Unuse();
      if (cc == equal) {
        dest->Split(cc);
      } else {
        dest->true_target()->Branch(cc);
        dest->false_target()->Jump();

        // It is important for performance for this case to be at the end.
        is_smi.Bind(left_side, right_side);
        if (IsUnsafeSmi(right_side->handle())) {
          right_side->ToRegister();
          __ cmp(left_reg, Operand(right_side->reg()));
        } else {
          __ cmp(Operand(left_reg), Immediate(right_side->handle()));
        }
        left_side->Unuse();
        right_side->Unuse();
        dest->Split(cc);
      }
    }
  }
}


// Check that the comparison operand is a number.  Jump to the not_numbers
// jump target, passing the left and right results, if the operand is not a
// number.
static void CheckComparisonOperand(MacroAssembler* masm_,
                                   Result* operand,
                                   Result* left_side,
                                   Result* right_side,
                                   JumpTarget* not_numbers) {
  // Perform check if operand is not known to be a number.
  if (!operand->type_info().IsNumber()) {
    Label done;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &done);
    __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
           Immediate(Factory::heap_number_map()));
    not_numbers->Branch(not_equal, left_side, right_side, not_taken);
    __ bind(&done);
  }
}


// Load a comparison operand onto the FPU stack.  This assumes that the
// operand has already been checked and is a number.
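// (The smi paths below spill the untagged value to the stack because the
// x87 fild instruction can only load an integer from memory, not from a
// general-purpose register.)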
static void LoadComparisonOperand(MacroAssembler* masm_,
                                  Result* operand) {
  Label done;
  if (operand->type_info().IsDouble()) {
    // Operand is known to be a heap number, just load it.
    __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
  } else if (operand->type_info().IsSmi()) {
    // Operand is known to be a smi.  Convert it to double and keep the
    // original smi.
    __ SmiUntag(operand->reg());
    __ push(operand->reg());
    __ fild_s(Operand(esp, 0));
    __ pop(operand->reg());
    __ SmiTag(operand->reg());
  } else {
    // Operand type not known: check for smi, otherwise assume heap number.
    Label smi;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &smi);
    __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
    __ jmp(&done);
    __ bind(&smi);
    __ SmiUntag(operand->reg());
    __ push(operand->reg());
    __ fild_s(Operand(esp, 0));
    __ pop(operand->reg());
    __ SmiTag(operand->reg());
    __ jmp(&done);
  }
  __ bind(&done);
}


// Load a comparison operand into an XMM register.  Jump to the not_numbers
// jump target, passing the left and right results, if the operand is not a
// number.
static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
                                      Result* operand,
                                      XMMRegister xmm_reg,
                                      Result* left_side,
                                      Result* right_side,
                                      JumpTarget* not_numbers) {
  Label done;
  if (operand->type_info().IsDouble()) {
    // Operand is known to be a heap number, just load it.
    __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
  } else if (operand->type_info().IsSmi()) {
    // Operand is known to be a smi.  Convert it to double and keep the
    // original smi.
    __ SmiUntag(operand->reg());
    __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
    __ SmiTag(operand->reg());
  } else {
    // Operand type not known, check for smi or heap number.
    Label smi;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &smi);
    if (!operand->type_info().IsNumber()) {
      __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
             Immediate(Factory::heap_number_map()));
      not_numbers->Branch(not_equal, left_side, right_side, taken);
    }
    __ movdbl(xmm_reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
    __ jmp(&done);

    __ bind(&smi);
    // Convert smi to double and keep the original smi.
    __ SmiUntag(operand->reg());
    __ cvtsi2sd(xmm_reg, Operand(operand->reg()));
    __ SmiTag(operand->reg());
    __ jmp(&done);
  }
  __ bind(&done);
}


void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
                                                   Result* right_side,
                                                   Condition cc,
                                                   ControlDestination* dest) {
  ASSERT(left_side->is_register());
  ASSERT(right_side->is_register());

  JumpTarget not_numbers;
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(SSE2);

    // Load left and right operands into registers xmm0 and xmm1 and compare.
    LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
                              &not_numbers);
    LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
                              &not_numbers);
    __ ucomisd(xmm0, xmm1);
  } else {
    // Make sure that both comparison operands are numbers.
    CheckComparisonOperand(masm_, left_side, left_side, right_side,
                           &not_numbers);
    CheckComparisonOperand(masm_, right_side, left_side, right_side,
                           &not_numbers);

    // Load right and left operands onto the FPU stack and compare.
    LoadComparisonOperand(masm_, right_side);
    LoadComparisonOperand(masm_, left_side);
    __ FCmp();
  }

  // Bail out if a NaN is involved.
  not_numbers.Branch(parity_even, left_side, right_side, not_taken);

  // Split to destination targets based on comparison.
  left_side->Unuse();
  right_side->Unuse();
  dest->true_target()->Branch(DoubleCondition(cc));
  dest->false_target()->Jump();

  not_numbers.Bind(left_side, right_side);
}


// Call the function just below TOS on the stack with the given
// arguments. The receiver is the TOS.
void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                      CallFunctionFlags flags,
                                      int position) {
  // Push the arguments ("left-to-right") on the stack.
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
    frame_->SpillTop();
  }

  // Record the position for debugging purposes.
  CodeForSourcePosition(position);

  // Use the shared code stub to call the function.
  InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
  CallFunctionStub call_function(arg_count, in_loop, flags);
  Result answer = frame_->CallStub(&call_function, arg_count + 1);
  // Restore context and replace function on the stack with the
  // result of the stub invocation.
  frame_->RestoreContextRegister();
  frame_->SetElementAt(0, &answer);
}


void CodeGenerator::CallApplyLazy(Expression* applicand,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).
  // If the arguments object of the scope has not been allocated,
  // and x.apply is Function.prototype.apply, this optimization
  // just copies y and the arguments of the current function on the
  // stack, as receiver and arguments, and calls x.
  // In the implementation comments, we call x the applicand
  // and y the receiver.
  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  // Load applicand.apply onto the stack. This will usually
  // give us a megamorphic load site. Not super, but it works.
  Load(applicand);
  frame()->Dup();
  Handle<String> name = Factory::LookupAsciiSymbol("apply");
  frame()->Push(name);
  Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
  __ nop();
  frame()->Push(&answer);

  // Load the receiver and the existing arguments object onto the
  // expression stack. Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);
  // Contents of frame at this point:
  // Frame[0]: arguments object of the current function or the hole.
  // Frame[1]: receiver
  // Frame[2]: applicand.apply
  // Frame[3]: applicand.

  // Check if the arguments object has been lazily allocated
  // already. If so, just use that instead of copying the arguments
  // from the stack. This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  frame_->Dup();
  Result probe = frame_->Pop();
  { VirtualFrame::SpilledScope spilled_scope;
    Label slow, done;
    bool try_lazy = true;
    if (probe.is_constant()) {
      try_lazy = probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      __ j(not_equal, &slow);
    }

    if (try_lazy) {
      Label build_args;
      // Get rid of the arguments object probe.
      frame_->Drop();  // Can be called on a spilled frame.
      // Stack now has 3 elements on it.
      // Contents of stack at this point:
      // esp[0]: receiver
      // esp[1]: applicand.apply
      // esp[2]: applicand.

      // Check that the receiver really is a JavaScript object.
      __ mov(eax, Operand(esp, 0));
      __ test(eax, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      // We allow all JSObjects including JSFunctions.  As long as
      // JS_FUNCTION_TYPE is the last instance type and it is right
      // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
      // bound.
      STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
      STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
      __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
      __ j(below, &build_args);

      // Check that applicand.apply is Function.prototype.apply.
      __ mov(eax, Operand(esp, kPointerSize));
      __ test(eax, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
      __ j(not_equal, &build_args);
      Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
      __ cmp(FieldOperand(eax, JSFunction::kCodeOffset), Immediate(apply_code));
      __ j(not_equal, &build_args);

      // Check that applicand is a function.
      __ mov(edi, Operand(esp, 2 * kPointerSize));
      __ test(edi, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
      __ j(not_equal, &build_args);

      // Copy the arguments to this function possibly from the
      // adaptor frame below it.
      Label invoke, adapted;
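      // An arguments adaptor frame is recognizable by the sentinel
      // Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR) stored in the slot
      // where a regular frame keeps its context pointer; that is what
      // the comparison below tests for.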
      __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
      __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
      __ cmp(Operand(ecx),
             Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
      __ j(equal, &adapted);

      // No arguments adaptor frame. Copy fixed number of arguments.
      __ mov(eax, Immediate(scope()->num_parameters()));
      for (int i = 0; i < scope()->num_parameters(); i++) {
        __ push(frame_->ParameterAt(i));
      }
      __ jmp(&invoke);

      // Arguments adaptor frame present. Copy arguments from there, but
      // avoid copying too many arguments to avoid stack overflows.
      __ bind(&adapted);
      static const uint32_t kArgumentsLimit = 1 * KB;
      __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
      __ SmiUntag(eax);
      __ mov(ecx, Operand(eax));
      __ cmp(eax, kArgumentsLimit);
      __ j(above, &build_args);

      // Loop through the arguments pushing them onto the execution
      // stack. We don't inform the virtual frame of the push, so we don't
      // have to worry about getting rid of the elements from the virtual
      // frame.
      Label loop;
      // ecx is a small non-negative integer, due to the test above.
      __ test(ecx, Operand(ecx));
      __ j(zero, &invoke);
      __ bind(&loop);
      __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
      __ dec(ecx);
      __ j(not_zero, &loop);

      // Invoke the function.
      __ bind(&invoke);
      ParameterCount actual(eax);
      __ InvokeFunction(edi, actual, CALL_FUNCTION);
      // Drop applicand.apply and applicand from the stack, and push
      // the result of the function call, but leave the spilled frame
      // unchanged, with 3 elements, so it is correct when we compile the
      // slow-case code.
      __ add(Operand(esp), Immediate(2 * kPointerSize));
      __ push(eax);
      // Stack now has 1 element:
      // esp[0]: result
      __ jmp(&done);

      // Slow-case: Allocate the arguments object since we know it isn't
      // there, and fall through to the slow case where we call
      // applicand.apply.
      __ bind(&build_args);
      // Stack now has 3 elements, because we have jumped from a point
      // where:
      // esp[0]: receiver
      // esp[1]: applicand.apply
      // esp[2]: applicand.

      // StoreArgumentsObject requires a correct frame, and may modify it.
      Result arguments_object = StoreArgumentsObject(false);
      frame_->SpillAll();
      arguments_object.ToRegister();
      frame_->EmitPush(arguments_object.reg());
      arguments_object.Unuse();
      // Stack and frame now have 4 elements.
      __ bind(&slow);
    }

    // Generic computation of x.apply(y, args) with no special optimization.
    // Flip applicand.apply and applicand on the stack, so
    // applicand looks like the receiver of the applicand.apply call.
    // Then process it as a normal function call.
    __ mov(eax, Operand(esp, 3 * kPointerSize));
    __ mov(ebx, Operand(esp, 2 * kPointerSize));
    __ mov(Operand(esp, 2 * kPointerSize), eax);
    __ mov(Operand(esp, 3 * kPointerSize), ebx);

    CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
    Result res = frame_->CallStub(&call_function, 3);
    // The function and its two arguments have been dropped.
    frame_->Drop(1);  // Drop the receiver as well.
    res.ToRegister();
    frame_->EmitPush(res.reg());
    // Stack now has 1 element:
    // esp[0]: result
    if (try_lazy) __ bind(&done);
  }  // End of spilled scope.
  // Restore the context register after a call.
  frame_->RestoreContextRegister();
}


class DeferredStackCheck: public DeferredCode {
 public:
  DeferredStackCheck() {
    set_comment("[ DeferredStackCheck");
  }

  virtual void Generate();
};


void DeferredStackCheck::Generate() {
  StackCheckStub stub;
  __ CallStub(&stub);
}


void CodeGenerator::CheckStack() {
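  // The stack limit is read through an ExternalReference because the
  // runtime can move it; besides catching real stack overflows, a failing
  // check here is also how running code gets interrupted.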
  DeferredStackCheck* deferred = new DeferredStackCheck;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit();
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  deferred->Branch(below);
  deferred->BindExit();
}


void CodeGenerator::VisitAndSpill(Statement* statement) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Visit(statement);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  VisitStatements(statements);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);

  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());
  for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
    Visit(statements->at(i));
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitBlock(Block* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  VisitStatements(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.  The inevitable call
  // will sync frame elements to memory anyway, so we do it eagerly to
  // allow us to push the arguments directly into place.
  frame_->SyncRange(0, frame_->element_count() - 1);

  frame_->EmitPush(esi);  // The context is the first argument.
  frame_->EmitPush(Immediate(pairs));
  frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
  Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->slot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call.  Sync the virtual frame eagerly
    // so we can simply push the arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    // Declaration nodes are always introduced in one of two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Immediate(Smi::FromInt(attr)));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPush(Immediate(Factory::the_hole_value()));
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      frame_->EmitPush(Immediate(Smi::FromInt(0)));  // no initial value!
    }
    Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    {
      // Set the initial value.
      Reference target(this, node->proxy());
      Load(val);
      target.SetValue(NOT_CONST_INIT);
      // The reference is removed from the stack (preserving TOS) when
      // it goes out of scope.
    }
    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  // Remove the lingering expression result from the top of stack.
  frame_->Drop();
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);
  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    JumpTarget then;
    JumpTarget else_;
    ControlDestination dest(&then, &else_, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The else target was bound, so we compile the else part first.
      Visit(node->else_statement());

      // We may have dangling jumps to the then part.
      if (then.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then target was bound, so we compile the then part first.
      Visit(node->then_statement());

      if (else_.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    }

  } else if (has_then_stm) {
    ASSERT(!has_else_stm);
    JumpTarget then;
    ControlDestination dest(&then, &exit, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The exit label was bound.  We may have dangling jumps to the
      // then part.
      if (then.is_linked()) {
        exit.Unuse();
        exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then label was bound.
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    ASSERT(!has_then_stm);
    JumpTarget else_;
    ControlDestination dest(&exit, &else_, false);
    LoadCondition(node->condition(), &dest, true);

    if (dest.true_was_fall_through()) {
      // The exit label was bound.  We may have dangling jumps to the
      // else part.
      if (else_.is_linked()) {
        exit.Unuse();
        exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    } else {
      // The else label was bound.
      Visit(node->else_statement());
    }

  } else {
    ASSERT(!has_then_stm && !has_else_stm);
    // We only care about the condition's side effects (not its value
    // or control flow effect).  LoadCondition is called without
    // forcing control flow.
    ControlDestination dest(&exit, &exit, true);
    LoadCondition(node->condition(), &dest, false);
    if (!dest.is_used()) {
      // We got a value on the frame rather than (or in addition to)
      // control flow.
      frame_->Drop();
    }
  }

  if (exit.is_linked()) {
    exit.Bind();
  }
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ReturnStatement");

  CodeForStatementPosition(node);
  Load(node->expression());
  Result return_value = frame_->Pop();
  masm()->WriteRecordedPositions();
  if (function_return_is_shadowed_) {
    function_return_.Jump(&return_value);
  } else {
    frame_->PrepareForReturn();
    if (function_return_.is_bound()) {
      // If the function return label is already bound we reuse the
      // code by jumping to the return site.
      function_return_.Jump(&return_value);
    } else {
      function_return_.Bind(&return_value);
      GenerateReturnSequence(&return_value);
    }
  }
}


void CodeGenerator::GenerateReturnSequence(Result* return_value) {
  // The return value is a live (but not currently reference counted)
  // reference to eax.  This is safe because the current frame does not
  // contain a reference to eax (it is prepared for the return by spilling
  // all registers).
  if (FLAG_trace) {
    frame_->Push(return_value);
    *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
  }
  return_value->ToRegister(eax);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);
#endif

  // Leave the frame and return popping the arguments and the
  // receiver.
  frame_->Exit();
  masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
  DeleteFrame();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check that the size of the code used for returning matches what is
  // expected by the debugger.
  ASSERT_EQ(Assembler::kJSReturnSequenceLength,
            masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  Result context;
  if (node->is_catch_block()) {
    context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    context = frame_->CallRuntime(Runtime::kPushContext, 1);
  }

  // Update context local.
  frame_->SaveContextRegister();

  // Verify that the runtime call result and esi agree.
  if (FLAG_debug_code) {
    __ cmp(context.reg(), Operand(esi));
    __ Assert(equal, "Runtime::NewContext should end up in esi");
  }
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
  // Update context local.
  frame_->SaveContextRegister();
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);

  // Compile the switch value.
  Load(node->tag());

  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;

  JumpTarget next_test;
  // Compile the case label expressions and comparisons.  Exit early
  // if a comparison is unconditionally true.  The target next_test is
  // bound before the loop in order to indicate control flow to the
  // first comparison.
  next_test.Bind();
  for (int i = 0; i < length && !next_test.is_unused(); i++) {
    CaseClause* clause = cases->at(i);
    // The default is not a test, but remember it for later.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    // We recycle the same target next_test for each test.  Bind it if
    // the previous test has not done so and then unuse it for the
    // loop.
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    next_test.Unuse();

    // Duplicate the switch value.
    frame_->Dup();

    // Compile the label expression.
    Load(clause->label());

    // Compare and branch to the body if true or the next test if
    // false.  Prefer the next test as a fall through.
    ControlDestination dest(clause->body_target(), &next_test, false);
    Comparison(node, equal, true, &dest);

    // If the comparison fell through to the true target, jump to the
    // actual body.
    if (dest.true_was_fall_through()) {
      clause->body_target()->Unuse();
      clause->body_target()->Jump();
    }
  }

  // If there was control flow to a next test from the last one
  // compiled, compile a jump to the default or break target.
  if (!next_test.is_unused()) {
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    // Drop the switch value.
    frame_->Drop();
    if (default_clause != NULL) {
      default_clause->body_target()->Jump();
    } else {
      node->break_target()->Jump();
    }
  }

  // The last instruction emitted was a jump, either to the default
  // clause or the break target, or else to a case body from the loop
  // that compiles the tests.
  ASSERT(!has_valid_frame());
  // Compile case bodies as needed.
  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);

    // There are two ways to reach the body: from the corresponding
    // test or as the fall through of the previous body.
    if (clause->body_target()->is_linked() || has_valid_frame()) {
      if (clause->body_target()->is_linked()) {
        if (has_valid_frame()) {
          // If we have both a jump to the test and a fall through, put
          // a jump on the fall through path to avoid the dropping of
          // the switch value on the test path.  The exception is the
          // default which has already had the switch value dropped.
          if (clause->is_default()) {
            clause->body_target()->Bind();
          } else {
            JumpTarget body;
            body.Jump();
            clause->body_target()->Bind();
            frame_->Drop();
            body.Bind();
          }
        } else {
          // No fall through to worry about.
          clause->body_target()->Bind();
          if (!clause->is_default()) {
            frame_->Drop();
          }
        }
      } else {
        // Otherwise, we have only fall through.
        ASSERT(has_valid_frame());
      }

      // We are now prepared to compile the body.
      Comment cmnt(masm_, "[ Case body");
      VisitStatements(clause->statements());
    }
    clause->body_target()->Unuse();
  }

  // We may not have a valid frame here so bind the break target only
  // if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  ConditionAnalysis info = AnalyzeCondition(node->cond());
  // Label the top of the loop for the backward jump if necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // Use the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      // No need to label it.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      break;
    case DONT_KNOW:
      // Continue is the test, so use the backward body target.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      body.Bind();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control flow can fall off the end of the body, jump back
      // to the top and bind the break target at the exit.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case ALWAYS_FALSE:
      // We may have had continues or breaks in the body.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        Comment cmnt(masm_, "[ DoWhileCondition");
        CodeForDoWhileConditionPosition(node);
        ControlDestination dest(&body, node->break_target(), false);
        LoadCondition(node->cond(), &dest, true);
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
  }

  DecrementLoopNesting();
  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}
Steve Blocka7e24c12009-10-30 11:49:00 +00004108
Steve Block3ce2e202009-11-05 08:53:23 +00004109
4110void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
4111 ASSERT(!in_spilled_code());
4112 Comment cmnt(masm_, "[ WhileStatement");
4113 CodeForStatementPosition(node);
4114
4115 // If the condition is always false and has no side effects, we do not
4116 // need to compile anything.
4117 ConditionAnalysis info = AnalyzeCondition(node->cond());
4118 if (info == ALWAYS_FALSE) return;
4119
4120 // Do not duplicate conditions that may have function literal
4121 // subexpressions. This can cause us to compile the function literal
4122 // twice.
4123 bool test_at_bottom = !node->may_have_function_literal();
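  // For instance, if 'while ((f = function () { ... })()) body;' were
  // compiled with the test duplicated at top and bottom, the function
  // literal would be compiled twice.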
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression.  Label the top of the
      // loop with the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is the test at the bottom, no need to label the test
        // at the top.  The body is a backward target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else {
        // Label the test at the top as the continue target.  The body
        // is a forward-only target.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // The loop body has been labeled with the continue target.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        // If we have chosen to recompile the test at the bottom,
        // then it is the continue target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here and thus an invalid fall-through).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // If we have chosen not to recompile the test at the bottom,
        // jump back to the one at the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame.  Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
  ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
  if (slot->type() == Slot::LOCAL) {
    frame_->SetTypeForLocalAt(slot->index(), info);
  } else {
    frame_->SetTypeForParamAt(slot->index(), info);
  }
  if (FLAG_debug_code && info.IsSmi()) {
    if (slot->type() == Slot::LOCAL) {
      frame_->PushLocalAt(slot->index());
    } else {
      frame_->PushParameterAt(slot->index());
    }
    Result var = frame_->Pop();
    var.ToRegister();
    __ AbortIfNotSmi(var.reg());
  }
}


void CodeGenerator::VisitForStatement(ForStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);

  // Compile the init expression if present.
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the condition is always false and has no side effects, we do not
  // need to compile anything else.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions.  This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();

  // Target for backward edge if no test at the bottom, otherwise
  // unused.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);

  // Target for backward edge if there is a test at the bottom,
  // otherwise used as target for test at the top.
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression.  Label the top of the
      // loop.
      if (node->next() == NULL) {
        // Use the continue target if there is no update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // Otherwise use the backward loop target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is either the update expression or the test at the
        // bottom, no need to label the test at the top.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else if (node->next() == NULL) {
        // We are not recompiling the test at the bottom and there is no
        // update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // We are not recompiling the test at the bottom and there is an
        // update expression.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }

      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition.  In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
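  // For example, in 'for (var i = 0; i < 100; i++) use(i);' with a body
  // that never assigns 'i', the index provably stays a smi.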
  if (node->is_fast_smi_loop()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
  }

  Visit(node->body());

  // If there is an update expression, compile it if necessary.
  if (node->next() != NULL) {
    if (node->continue_target()->is_linked()) {
      node->continue_target()->Bind();
    }

    // Control can reach the update by falling out of the body or by a
    // continue.
    if (has_valid_frame()) {
      // Record the source position of the statement, because this code,
      // which comes after the code for the body, actually belongs to the
      // loop statement and not the body.
      CodeForStatementPosition(node);
      Visit(node->next());
    }
  }

  // Set the type of the loop variable to smi before compiling the test
  // expression if we are in a fast smi loop condition.
  if (node->is_fast_smi_loop() && has_valid_frame()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
  }

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      if (has_valid_frame()) {
        if (node->next() == NULL) {
          node->continue_target()->Jump();
        } else {
          loop.Jump();
        }
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        if (node->continue_target()->is_linked()) {
          // We can have dangling jumps to the continue target if there
          // was no update expression.
          node->continue_target()->Bind();
        }
        // Control can reach the test at the bottom by falling out of
        // the body, by a continue in the body, or from the update
        // expression.
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // Otherwise, jump back to the test at the top.
        if (has_valid_frame()) {
          if (node->next() == NULL) {
            node->continue_target()->Jump();
          } else {
            loop.Jump();
          }
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame.  Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive;
  JumpTarget jsobject;
  JumpTarget fixed_array;
  JumpTarget entry(JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check;
  JumpTarget exit;

  // Get the object to enumerate over (converted to JSObject).
  LoadAndSpill(node->enumerable());

  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification; section 12.6.4 mandates a call to ToObject.
  frame_->EmitPop(eax);

  // eax: value to be iterated over
  __ cmp(eax, Factory::undefined_value());
  exit.Branch(equal);
  __ cmp(eax, Factory::null_value());
  exit.Branch(equal);
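  // As a result, 'for (var p in null) {}' and 'for (var p in undefined) {}'
  // compile to a straight jump past the loop.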

  // Stack layout in body:
  // [iteration counter (smi)] <- slot 0
  // [length of array]         <- slot 1
  // [FixedArray]              <- slot 2
  // [Map or 0]                <- slot 3
  // [Object]                  <- slot 4

  // Check if enumerable is already a JSObject.
  // eax: value to be iterated over
  __ test(eax, Immediate(kSmiTagMask));
  primitive.Branch(zero);
  __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
  jsobject.Branch(above_equal);

  primitive.Bind();
  frame_->EmitPush(eax);
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
  // The builtin call returns the value in eax, which is where we want
  // it below.

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // eax: value to be iterated over
  frame_->EmitPush(eax);  // Push the object being iterated over.

  // Check cache validity in generated code.  This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks.  If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(ecx, eax);
  loop.Bind();
  // Check that there are no elements.
  __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache.  Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
  call_runtime.Branch(equal);
  // Check that there is an enum cache in the non-empty instance
  // descriptors.  This is the case if the next enumeration index
  // field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  call_runtime.Branch(zero);
  // For all objects but the receiver, check that the cache is empty.
  __ cmp(ecx, Operand(eax));
  check_prototype.Branch(equal);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  loop.Branch(not_equal);
  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(eax);  // push the Object (slot 4) for the runtime call
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check.  Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // eax: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(edx, Operand(eax));
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, Factory::meta_map());
  fixed_array.Branch(not_equal);

  use_cache.Bind();
  // Get the enum cache.
  // eax: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(ecx, Operand(eax));

  __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
  // Get the bridge array held in the enumeration index field.
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  // Get the cache from the bridge array.
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(eax);  // <- slot 3
  frame_->EmitPush(edx);  // <- slot 2
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
  entry.Jump();

  fixed_array.Bind();
  // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 3
  frame_->EmitPush(eax);  // <- slot 2

  // Push the length of the array and the initial index onto the stack.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0

  // Condition.
  entry.Bind();
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);

  __ mov(eax, frame_->ElementAt(0));  // load the current count
  __ cmp(eax, frame_->ElementAt(1));  // compare to the array length
  node->break_target()->Branch(above_equal);

  // Get the i'th entry of the array.
  __ mov(edx, frame_->ElementAt(2));
  __ mov(ebx, FixedArrayElementOperand(edx, eax));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case.  eax: current iteration count, ebx: i'th
  // entry of the enum cache.
  __ mov(edx, frame_->ElementAt(3));
  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  // edx: expected map value
  __ mov(ecx, frame_->ElementAt(4));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, Operand(edx));
  end_del_check.Branch(equal);

  // Convert the entry to a string (or null if it isn't a property anymore).
  frame_->EmitPush(frame_->ElementAt(4));  // push enumerable
  frame_->EmitPush(ebx);  // push entry
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
  __ mov(ebx, Operand(eax));

  // If the property has been removed while iterating, we just skip it.
  __ test(ebx, Operand(ebx));
  node->continue_target()->Branch(equal);

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop.  ebx: i'th entry of the enum cache (or string thereof)
  frame_->EmitPush(ebx);
  { Reference each(this, node->each());
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        // Loading a reference may leave the frame in an unspilled state.
        frame_->SpillAll();
        // Get the value (under the reference on the stack) from memory.
        frame_->EmitPush(frame_->ElementAt(each.size()));
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop(2);
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (e.g., ebx pushed above) is
        // right on top of or right underneath a zero-sized reference.
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop();
      }
    }
  }
  // Unloading a reference may leave the frame in an unspilled state.
  frame_->SpillAll();

  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  VisitAndSpill(node->body());

  // Next.  Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  frame_->EmitPop(eax);
  __ add(Operand(eax), Immediate(Smi::FromInt(1)));
  frame_->EmitPush(eax);
  entry.Jump();

  // Cleanup.  No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();

  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}


void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(eax);

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->slot() != NULL);
  StoreToSlot(catch_var->slot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  VisitStatementsAndSpill(node->catch_block()->statements());
  if (has_valid_frame()) {
    exit.Jump();
  }


  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns.  During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }
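  // For example, a 'return' inside 'try { return x; } catch (e) { ... }'
  // first reaches the shadow target, so the try handler can be unlinked
  // below before the actual return jump is taken.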

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // Make sure that there's nothing left on the stack above the
  // handler structure.
  if (FLAG_debug_code) {
    __ mov(eax, Operand::StaticVariable(handler_address));
    __ cmp(esp, Operand(eax));
    __ Assert(equal, "stack pointer should point to top handler");
  }

  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.  Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing targets that
  // have been jumped to.  Deallocate each shadow target.
  Result return_value;
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain; be careful not to destroy the TOS if
      // there is one.
      if (i == kReturnShadowIndex) {
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (e.g., for...in) may have left stuff on the stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        if (!function_return_is_shadowed_) frame_->PrepareForReturn();
        shadows[i]->other_target()->Jump(&return_value);
      } else {
        shadows[i]->other_target()->Jump();
      }
    }
  }

  exit.Bind();
}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block.  Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
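  // FALLING means control fell off the end of the try block, THROWING
  // means an exception is pending and must be rethrown after the finally
  // block, and JUMPING + i means control left through the i'th shadowing
  // target (e.g., a return or break).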

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(eax);
  // In case of thrown exceptions, this is where we continue.
  __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns.  During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in ecx, then jump around the unlink blocks if any.
    frame_->EmitPush(Immediate(Factory::undefined_value()));
    __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // on the virtual frame.  We must preserve it until it is
      // pushed.
      if (i == kReturnShadowIndex) {
        Result return_value;
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (e.g., for...in) may have left stuff on the
      // stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.
      STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this target shadowed the function return, materialize
        // the return value on the stack.
        frame_->EmitPush(eax);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        frame_->EmitPush(Immediate(Factory::undefined_value()));
      }
      __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(ecx);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatementsAndSpill(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(ecx);
    frame_->EmitPop(eax);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets.  Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      BreakTarget* original = shadows[i]->other_target();
      __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
      if (i == kReturnShadowIndex) {
        // The return value is (already) in eax.
        Result return_value = allocator_->Allocate(eax);
        ASSERT(return_value.is_valid());
        if (function_return_is_shadowed_) {
          original->Branch(equal, &return_value);
        } else {
          // Branch around the preparation for return which may emit
          // code.
          JumpTarget skip;
          skip.Branch(not_equal);
          frame_->PrepareForReturn();
          original->Jump(&return_value);
          skip.Bind();
        }
      } else {
        original->Branch(equal);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
    exit.Branch(not_equal);

    // Rethrow exception.
    frame_->EmitPush(eax);  // undo pop from above
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Spill everything, even constants, to the frame.
  frame_->SpillAll();

  frame_->DebugBreak();
  // Ignore the return value.
#endif
}


Result CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info) {
  // The inevitable call will sync frame elements to memory anyway, so
  // we do it eagerly to allow us to push the arguments directly into
  // place.
  frame()->SyncRange(0, frame()->element_count() - 1);

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
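  // For example, a nested 'function () { return x; }' has no literals
  // and qualifies, while a function containing, say, a regexp or object
  // literal does not.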
  if (scope()->is_function_scope() && function_info->num_literals() == 0) {
    FastNewClosureStub stub;
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallStub(&stub, 1);
  } else {
    // Call the runtime to instantiate the function based on the
    // shared function info.
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallRuntime(Runtime::kNewClosure, 2);
  }
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  Comment cmnt(masm_, "[ FunctionLiteral");
  ASSERT(!in_safe_int32_mode());
  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script(), this);
  // Check for stack-overflow exception.
  if (HasStackOverflow()) return;
  Result result = InstantiateFunction(function_info);
  frame()->Push(&result);
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  Result result = InstantiateFunction(node->shared_function_info());
  frame()->Push(&result);
}


void CodeGenerator::VisitConditional(Conditional* node) {
  Comment cmnt(masm_, "[ Conditional");
  ASSERT(!in_safe_int32_mode());
  JumpTarget then;
  JumpTarget else_;
  JumpTarget exit;
  ControlDestination dest(&then, &else_, true);
  LoadCondition(node->condition(), &dest, true);

  if (dest.false_was_fall_through()) {
    // The else target was bound, so we compile the else part first.
    Load(node->else_expression());

    if (then.is_linked()) {
      exit.Jump();
      then.Bind();
      Load(node->then_expression());
    }
  } else {
    // The then target was bound, so we compile the then part first.
    Load(node->then_expression());

    if (else_.is_linked()) {
      exit.Jump();
      else_.Bind();
      Load(node->else_expression());
    }
  }

  exit.Bind();
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());
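    // A LOOKUP slot arises, e.g., for 'x' in
    // 'function f() { eval(s); return x; }': the eval call may introduce
    // a shadowing binding for 'x', so the load needs a runtime lookup
    // unless the fast case below applies.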
    JumpTarget slow;
    JumpTarget done;
    Result value;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &value,
                                    &slow,
                                    &done);

    slow.Bind();
    // A runtime call is inevitable.  We eagerly sync frame elements
    // to memory so that we can push the arguments directly into place
    // on top of the frame.
    frame()->SyncRange(0, frame()->element_count() - 1);
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(slot->var()->name()));
    if (typeof_state == INSIDE_TYPEOF) {
      value =
          frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      value = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind(&value);
    frame_->Push(&value);

  } else if (slot->var()->mode() == Variable::CONST) {
    // Const slots may contain 'the hole' value (the constant hasn't been
    // initialized yet) which needs to be converted into the 'undefined'
    // value.
    //
    // We currently spill the virtual frame because constants use the
    // potentially unsafe direct-frame access of SlotOperand.
    VirtualFrame::SpilledScope spilled_scope;
    Comment cmnt(masm_, "[ Load const");
    Label exit;
    __ mov(ecx, SlotOperand(slot, ecx));
    __ cmp(ecx, Factory::the_hole_value());
    __ j(not_equal, &exit);
    __ mov(ecx, Factory::undefined_value());
    __ bind(&exit);
    frame()->EmitPush(ecx);

  } else if (slot->type() == Slot::PARAMETER) {
    frame()->PushParameterAt(slot->index());

  } else if (slot->type() == Slot::LOCAL) {
    frame()->PushLocalAt(slot->index());

  } else {
    // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
    // here.
    //
    // The use of SlotOperand below is safe for an unspilled frame
    // because it will always be a context slot.
    ASSERT(slot->type() == Slot::CONTEXT);
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
    frame()->Push(&temp);
  }
}


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // If the loaded value is a constant, we know if the arguments
  // object has been lazily loaded yet.
  Result result = frame()->Pop();
  if (result.is_constant()) {
    if (result.handle()->IsTheHole()) {
      result = StoreArgumentsObject(false);
    }
    frame()->Push(&result);
    return;
  }
  ASSERT(result.is_register());
  // The loaded value is in a register.  If it is the sentinel that
  // indicates that we haven't loaded the arguments object yet, we
  // need to do it now.
  JumpTarget exit;
  __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
  frame()->Push(&result);
  exit.Branch(not_equal);

  result = StoreArgumentsObject(false);
  frame()->SetElementAt(0, &result);
  result.Unuse();
  exit.Bind();
  return;
}


Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    JumpTarget* slow) {
  ASSERT(!in_safe_int32_mode());
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
  Register context = esi;
  Result tmp = allocator_->Allocate();
  ASSERT(tmp.is_valid());  // All non-reserved registers were available.

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      // Load next context in chain.
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }
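  // A context acquires an extension object when eval introduces new
  // bindings, e.g. eval("var y = 0") in an enclosing scope; any non-NULL
  // extension therefore forces the slow path.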

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(tmp.reg())) {
      __ mov(tmp.reg(), context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
    slow->Branch(not_equal, not_taken);
    // Load next context in chain.
    __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
    __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }
  tmp.Unuse();

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  // The register allocator prefers eax if it is free, so the code generator
  // will load the global object directly into eax, which is where the LoadIC
  // expects it.
  frame_->Spill(eax);
  LoadGlobal();
  frame_->Push(slot->var()->name());
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
                         ? RelocInfo::CODE_TARGET
                         : RelocInfo::CODE_TARGET_CONTEXT;
  Result answer = frame_->CallLoadIC(mode);
  // A test eax instruction following the call signals that the inobject
  // property case was inlined.  Ensure that there is not a test eax
  // instruction here.
  __ nop();
  return answer;
}


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    Result* result,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
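  // For instance, a plain 'eval("f()")' introduces no new bindings, so
  // loads of the surrounding variables can stay on the fast path.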
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    done->Jump(result);

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      // Allocate a fresh register to use as a temp in
      // ContextSlotOperandCheckExtensions and to hold the result
      // value.
      *result = allocator()->Allocate();
      ASSERT(result->is_valid());
      __ mov(result->reg(),
             ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ cmp(result->reg(), Factory::the_hole_value());
        done->Branch(not_equal, result);
        __ mov(result->reg(), Factory::undefined_value());
      }
      done->Jump(result);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables.  Then load the argument from the arguments
          // object using keyed load.
          Result arguments = allocator()->Allocate();
          ASSERT(arguments.is_valid());
          __ mov(arguments.reg(),
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
                                                   arguments,
                                                   slow));
          frame_->Push(&arguments);
          frame_->Push(key_literal->handle());
          *result = EmitKeyedLoad();
          done->Jump(result);
        }
      }
    }
  }
}


void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call.  Since the call is inevitable,
    // we eagerly sync the virtual frame so we can directly push the
    // arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);

    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(slot->var()->name()));

    Result value;
    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores attribute
      // (e.g. READ_ONLY) of context slot so that we can initialize const
      // properties (introduced via eval("const foo = (some expr);")).  Also,
      // uses the current function context instead of the top context.
      //
      // Note that we must declare the foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the same
      // time, because the const declaration may be at the end of the eval
      // code (sigh...) and the const variable may have been used before
      // (where its value is 'undefined').  Thus, we can only do the
      // initialization when we actually encounter the expression and when
      // the expression operands are defined and valid, and thus we need the
      // split into 2 operations: declaration of the context slot followed
      // by initialization.
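      // E.g. in eval("use(foo); const foo = 1;") the use of 'foo' sees
      // 'undefined' until the initialization statement itself is reached.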
      value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack.  This is necessary for compiling chained assignment
    // expressions.
    frame_->Push(&value);

  } else {
    ASSERT(!slot->var()->is_dynamic());

    JumpTarget exit;
    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value).  When the assignment is executed,
      // the code is identical to a normal store (see below).
      //
      // We spill the frame in the code below because the direct-frame
      // access of SlotOperand is potentially unsafe with an unspilled
      // frame.
      VirtualFrame::SpilledScope spilled_scope;
      Comment cmnt(masm_, "[ Init const");
      __ mov(ecx, SlotOperand(slot, ecx));
      __ cmp(ecx, Factory::the_hole_value());
      exit.Branch(not_equal);
    }

    // We must execute the store.  Storing a variable must keep the (new)
    // value on the stack.  This is necessary for compiling assignment
    // expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will initialize
    // consts to 'the hole' value and by doing so, end up calling this code.
    if (slot->type() == Slot::PARAMETER) {
      frame_->StoreToParameterAt(slot->index());
    } else if (slot->type() == Slot::LOCAL) {
      frame_->StoreToLocalAt(slot->index());
    } else {
      // The other slot types (LOOKUP and GLOBAL) cannot reach here.
      //
      // The use of SlotOperand below is safe for an unspilled frame
      // because the slot is a context slot.
      ASSERT(slot->type() == Slot::CONTEXT);
      frame_->Dup();
      Result value = frame_->Pop();
      value.ToRegister();
      Result start = allocator_->Allocate();
      ASSERT(start.is_valid());
      __ mov(SlotOperand(slot, start.reg()), value.reg());
      // RecordWrite may destroy the value registers.
      //
      // TODO(204): Avoid actually spilling when the value is not
      // needed (probably the common case).
      frame_->Spill(value.reg());
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
5402 // The results start, value, and temp are unused by going out of
5403 // scope.
5404 }
5405
5406 exit.Bind();
5407 }
5408}
5409
5410
void CodeGenerator::VisitSlot(Slot* slot) {
  Comment cmnt(masm_, "[ Slot");
  if (in_safe_int32_mode()) {
    if (slot->type() == Slot::LOCAL && !slot->is_arguments()) {
      frame()->UntaggedPushLocalAt(slot->index());
    } else if (slot->type() == Slot::PARAMETER) {
      frame()->UntaggedPushParameterAt(slot->index());
    } else {
      UNREACHABLE();
    }
  } else {
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
  }
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
  Comment cmnt(masm_, "[ VariableProxy");
  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    ASSERT(!in_safe_int32_mode());
    Reference ref(this, node);
    ref.GetValue();
  }
}


void CodeGenerator::VisitLiteral(Literal* node) {
  Comment cmnt(masm_, "[ Literal");
  if (in_safe_int32_mode()) {
    frame_->PushUntaggedElement(node->handle());
  } else {
    frame_->Push(node->handle());
  }
}


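// The "unsafe smi" helpers below materialize a smi constant in two
// halves (emit the low 16 bits, then OR in the high 16 bits) so that
// the complete untagged value never appears as a single immediate in
// the generated code. IsUnsafeSmi decides, based on
// kMaxSmiInlinedBits, which smis need this treatment.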
void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ push(Immediate(bits & 0x0000FFFF));
  __ or_(Operand(esp, 0), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ mov(Operand(ebp, offset), Immediate(bits & 0x0000FFFF));
  __ or_(Operand(ebp, offset), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
  ASSERT(target.is_valid());
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ Set(target, Immediate(bits & 0x0000FFFF));
  __ or_(target, bits & 0xFFFF0000);
}


bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
  if (!value->IsSmi()) return false;
  int int_value = Smi::cast(*value)->value();
  return !is_intn(int_value, kMaxSmiInlinedBits);
}


// Materialize the regexp literal 'node' in the literals array
// 'literals' of the function. Leave the regexp boilerplate in
// 'boilerplate'.
class DeferredRegExpLiteral : public DeferredCode {
 public:
  DeferredRegExpLiteral(Register boilerplate,
                        Register literals,
                        RegExpLiteral* node)
      : boilerplate_(boilerplate), literals_(literals), node_(node) {
    set_comment("[ DeferredRegExpLiteral");
  }

  void Generate();

 private:
  Register boilerplate_;
  Register literals_;
  RegExpLiteral* node_;
};


void DeferredRegExpLiteral::Generate() {
  // Since the entry is undefined we call the runtime system to
  // compute the literal.
  // Literal array (0).
  __ push(literals_);
  // Literal index (1).
  __ push(Immediate(Smi::FromInt(node_->literal_index())));
  // RegExp pattern (2).
  __ push(Immediate(node_->pattern()));
  // RegExp flags (3).
  __ push(Immediate(node_->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
}


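// Deferred fallback for inline new-space allocation: calls the runtime
// to allocate 'size' bytes and leaves the result in 'target'.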
class DeferredAllocateInNewSpace : public DeferredCode {
 public:
  DeferredAllocateInNewSpace(int size, Register target)
      : size_(size), target_(target) {
    ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
    set_comment("[ DeferredAllocateInNewSpace");
  }

  void Generate();

 private:
  int size_;
  Register target_;
};


void DeferredAllocateInNewSpace::Generate() {
  __ push(Immediate(Smi::FromInt(size_)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  if (!target_.is(eax)) {
    __ mov(target_, eax);
  }
}


void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literals array and check the allocated entry. Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the RegExp object. If so,
  // jump to the deferred code passing the literals array.
  DeferredRegExpLiteral* deferred =
      new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
  __ cmp(boilerplate.reg(), Factory::undefined_value());
  deferred->Branch(equal);
  deferred->BindExit();

  // The boilerplate register now contains the RegExp object.

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;

  DeferredAllocateInNewSpace* allocate_fallback =
      new DeferredAllocateInNewSpace(size, literals.reg());
  frame_->Push(&boilerplate);
  frame_->SpillTop();
  __ AllocateInNewSpace(size,
                        literals.reg(),
                        tmp.reg(),
                        no_reg,
                        allocate_fallback->entry_label(),
                        TAG_OBJECT);
  allocate_fallback->BindExit();
  boilerplate = frame_->Pop();

  // Copy from boilerplate to clone and return clone.
  for (int i = 0; i < size; i += kPointerSize) {
    __ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
    __ mov(FieldOperand(literals.reg(), i), tmp.reg());
  }
  frame_->Push(&literals);
}


void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ObjectLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant properties.
  frame_->Push(node->constant_properties());
  // Should the object literal have fast elements?
  frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->Push(&clone);

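  // Emit code to store the properties that the cloning runtime call
  // above did not handle; constant properties are already in place.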
  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        Handle<Object> key(property->key()->handle());
        if (key->IsSymbol()) {
          // Duplicate the object as the IC receiver.
          frame_->Dup();
          Load(property->value());
          Result ignored =
              frame_->CallStoreIC(Handle<String>::cast(key), false);
          // A test eax instruction following the store IC call would
          // indicate the presence of an inlined version of the
          // store. Add a nop to indicate that there is no such
          // inlined version.
          __ nop();
          break;
        }
        // Fall through
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}


void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ArrayLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  frame_->Push(&literals);
  frame_->Push(Smi::FromInt(node->literal_index()));
  frame_->Push(node->constant_elements());
  int length = node->values()->length();
  Result clone;
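  // Pick the cloning strategy: copy-on-write elements use the COW stub,
  // deep literals and over-long shallow literals go through the runtime,
  // and the rest use the fast shallow-clone stub.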
  if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
    __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
  } else if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
    clone = frame_->CallStub(&stub, 3);
  }
  frame_->Push(&clone);

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < length; i++) {
    Expression* value = node->values()->at(i);

    if (!CompileTimeValue::ArrayLiteralElementNeedsInitialization(value)) {
      continue;
    }

    // The property must be set by generated code.
    Load(value);

    // Get the property value off the stack.
    Result prop_value = frame_->Pop();
    prop_value.ToRegister();

    // Fetch the array literal while leaving a copy on the stack and
    // use it to get the elements array.
    frame_->Dup();
    Result elements = frame_->Pop();
    elements.ToRegister();
    frame_->Spill(elements.reg());
    // Get the elements array.
    __ mov(elements.reg(),
           FieldOperand(elements.reg(), JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());

    // Update the write barrier for the array address.
    frame_->Spill(prop_value.reg());  // Overwritten by the write barrier.
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_valid());
    __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
  }
}


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
  ASSERT(!in_safe_int32_mode());
  ASSERT(!in_spilled_code());
  // Call the runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  Result result =
      frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->Push(&result);
}


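// Emit an assignment whose target is a non-global variable; the store
// itself is delegated to StoreToSlot.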
void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->slot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    // Construct the implicit binary operation.
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT(frame()->height() == original_height + 1);
}


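// Emit an assignment to a named property, or to a global variable,
// which is handled as a named store on the global object.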
void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary. If
  // the receiver is trivial it is not placed on the stack at this point, but
  // loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so this will never be an assignment to a variable and
    // there must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      frame()->Dup();
    }
    Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame()->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else if (var != NULL) {
      // The LoadIC stub expects the object in eax.
      // Freeing eax causes the code generator to load the global into it.
      frame_->Spill(eax);
      LoadGlobal();
    } else {
      frame()->Dup();
    }
    Result value = EmitNamedLoad(name, var != NULL);
    frame()->Push(&value);
    Load(node->value());

    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    // Construct the implicit binary operation.
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    Result value = frame()->Pop();
    frame()->Push(prop->obj());
    frame()->Push(&value);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  Result answer = EmitNamedStore(name, is_contextual);
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Result result = frame()->Pop();
      Result receiver = frame()->Pop();
      frame()->Push(&result);
      frame()->Push(&receiver);
    }
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(frame()->height(), original_height + 1);
}


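// Emit an assignment to a keyed property, e.g. 'obj[key] = value'.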
void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame()->PushElementAt(1);
    frame()->PushElementAt(1);
    Result value = EmitKeyedLoad();
    frame()->Push(&value);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  Result answer = EmitKeyedStore(prop->key()->type());
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Result result = frame()->Pop();
    Result receiver = frame()->Pop();
    frame()->Push(&result);
    frame()->Push(&receiver);
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT(frame()->height() == original_height + 1);
}


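// Dispatch on the kind of assignment target: non-global variables,
// named properties (including globals), keyed properties, or an
// invalid left-hand side.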
void CodeGenerator::VisitAssignment(Assignment* node) {
  ASSERT(!in_safe_int32_mode());
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame()->Push(&result);
  }

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitThrow(Throw* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Throw");
  Load(node->exception());
  Result result = frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->Push(&result);
}


void CodeGenerator::VisitProperty(Property* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Property");
  Reference property(this, node);
  property.GetValue();
}


void CodeGenerator::VisitCall(Call* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.

    // Prepare the stack for the call to the resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->Push(Factory::undefined_value());

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Result to hold the result of the function resolution and the
    // final result of the eval call.
    Result result;

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code. If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->slot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      Result fun = LoadFromGlobalSlotCheckExtensions(var->slot(),
                                                     NOT_INSIDE_TYPEOF,
                                                     &slow);
      frame_->Push(&fun);
      if (arg_count > 0) {
        frame_->PushElementAt(arg_count);
      } else {
        frame_->Push(Factory::undefined_value());
      }
      frame_->PushParameterAt(-1);

      // Resolve the call.
      result =
          frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);

      done.Jump(&result);
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    frame_->PushElementAt(arg_count + 1);
    if (arg_count > 0) {
      frame_->PushElementAt(arg_count);
    } else {
      frame_->Push(Factory::undefined_value());
    }
    frame_->PushParameterAt(-1);

    // Resolve the call.
    result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind(&result);

    // The runtime call returns a pair of values in eax (function) and
    // edx (receiver). Touch up the stack with the right values.
    Result receiver = allocator_->Allocate(edx);
    frame_->SetElementAt(arg_count + 1, &result);
    frame_->SetElementAt(arg_count, &receiver);
    receiver.Unuse();

    // Call the function.
    CodeForSourcePosition(node->position());
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    result = frame_->CallStub(&call_function, arg_count + 1);

    // Restore the context and overwrite the function on the stack with
    // the result.
    frame_->RestoreContextRegister();
    frame_->SetElementAt(0, &result);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------

    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
      frame_->SpillTop();
    }

    // Push the name of the function onto the frame.
    frame_->Push(var->name());

    // Call the IC initialization code.
    CodeForSourcePosition(node->position());
    Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
                                       arg_count,
                                       loop_nesting());
    frame_->RestoreContextRegister();
    frame_->Push(&result);

  } else if (var != NULL && var->slot() != NULL &&
             var->slot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;
    Result function;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->slot(),
                                    NOT_INSIDE_TYPEOF,
                                    &function,
                                    &slow,
                                    &done);

    slow.Bind();
    // Enter the runtime system to load the function from the context.
    // Sync the frame so we can push the arguments directly into
    // place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // The runtime call returns a pair of values in eax and edx. The
    // looked-up function is in eax and the receiver is in edx. These
    // register references are not ref counted here. We spill them
    // eagerly since they are arguments to an inevitable call (and are
    // not sharable by the arguments).
    ASSERT(!allocator()->is_used(eax));
    frame_->EmitPush(eax);

    // Load the receiver.
    ASSERT(!allocator()->is_used(edx));
    frame_->EmitPush(edx);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind(&function);
      frame_->Push(&function);
      LoadGlobalReceiver();
      call.Bind();
    }

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Push the name of the function onto the frame.
        frame_->Push(name);

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
                               loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the function to call from the property through a reference.

      // Pass receiver to called function.
      if (property->is_synthetic()) {
        Reference ref(this, property);
        ref.GetValue();
        // Use global object as receiver.
        LoadGlobalReceiver();
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
          frame_->SpillTop();
        }

        // Load the name of the function.
        Load(property->key());

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallKeyedCallIC(RelocInfo::CODE_TARGET,
                                    arg_count,
                                    loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver();

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
  }
}


void CodeGenerator::VisitCallNew(CallNew* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Compute function to call and use the global object as the
  // receiver. There is no need to use the global proxy here because
  // it will always be replaced with a newly allocated object.
  Load(node->expression());
  LoadGlobal();

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Result result = frame_->CallConstructor(arg_count);
  // Replace the function on the stack with the result.
  frame_->SetElementAt(0, &result);
}


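// Generate inline code for %_IsSmi: a value is a smi iff its tag bit
// is clear.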
void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  value.Unuse();
  destination()->Split(zero);
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  frame_->Push(Factory::undefined_value());
}


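// Generate inline code for %_IsNonNegativeSmi: both the tag bit and
// the sign bit must be clear.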
void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
  value.Unuse();
  destination()->Split(zero);
}


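// Deferred slow case for %_StringCharCodeAt: handles the cases the
// fast path rejects by calling into the runtime, returns undefined to
// trigger conversion, and NaN when the index is out of range.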
class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ Set(result_, Immediate(Factory::undefined_value()));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ Set(result_, Immediate(Factory::nan_value()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};


// This generates code that performs a String.prototype.charCodeAt() call
// or returns undefined in order to trigger conversion.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need two extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch = allocator()->Allocate();
  ASSERT(scratch.is_valid());

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object.reg(),
                                   index.reg(),
                                   scratch.reg(),
                                   result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Result code = frame_->Pop();
  code.ToRegister();
  ASSERT(code.is_valid());

  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code.reg(), result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ Set(result_, Immediate(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ Set(result_, Immediate(Factory::empty_string()));
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();
  object.ToRegister();
  index.ToRegister();
  // We might mutate the object register.
  frame_->Spill(object.reg());

  // We need three extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch1 = allocator()->Allocate();
  ASSERT(scratch1.is_valid());
  Result scratch2 = allocator()->Allocate();
  ASSERT(scratch2.is_valid());

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object.reg(),
                               index.reg(),
                               scratch1.reg(),
                               scratch2.reg(),
                               result.reg());
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->Push(&result);
}


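// Generate inline code for %_IsArray: smis are rejected, heap objects
// are checked against JS_ARRAY_TYPE.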
void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a JS array or not.
  __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a regexp.
  __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();

  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  __ cmp(obj.reg(), Factory::null_value());
  destination()->true_target()->Branch(equal);

  Result map = allocator()->Allocate();
  ASSERT(map.is_valid());
  __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  destination()->false_target()->Branch(not_zero);
  // Do a range test for JSObject type. We can't use
  // MacroAssembler::IsInstanceJSObjectType, because we are using a
  // ControlDestination, so we copy its implementation here.
  __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
  __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
  __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
  obj.Unuse();
  map.Unuse();
  destination()->Split(below_equal);
}


void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
  //  typeof(arg) === 'function').
  // It includes undetectable objects (as opposed to IsObject).
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);

  // Check that this is an object.
  frame_->Spill(value.reg());
  __ CmpObjectType(value.reg(), FIRST_JS_OBJECT_TYPE, value.reg());
  value.Unuse();
  destination()->Split(above_equal);
}

// Deferred code to check whether a String wrapper object is safe to use the
// default valueOf behavior. This code runs after the fast check of the bit
// caching this information in the map; on entry the object's map is in the
// map_result_ register. On return the register map_result_ contains 1 for
// true and 0 for false.
class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
 public:
  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
                                               Register map_result,
                                               Register scratch1,
                                               Register scratch2)
      : object_(object),
        map_result_(map_result),
        scratch1_(scratch1),
        scratch2_(scratch2) { }

  virtual void Generate() {
    Label false_result;

    // Check that map is loaded as expected.
    if (FLAG_debug_code) {
      __ cmp(map_result_, FieldOperand(object_, HeapObject::kMapOffset));
      __ Assert(equal, "Map not in expected register");
    }

    // Check for fast case object. Generate false result for slow case object.
    __ mov(scratch1_, FieldOperand(object_, JSObject::kPropertiesOffset));
    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
    __ cmp(scratch1_, Factory::hash_table_map());
    __ j(equal, &false_result);

    // Look for valueOf symbol in the descriptor array, and indicate false if
    // found. The type is not checked, so if it is a transition it is a false
    // negative.
    __ mov(map_result_,
           FieldOperand(map_result_, Map::kInstanceDescriptorsOffset));
    __ mov(scratch1_, FieldOperand(map_result_, FixedArray::kLengthOffset));
    // map_result_: descriptor array
    // scratch1_: length of descriptor array
    // Calculate the end of the descriptor array.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    STATIC_ASSERT(kPointerSize == 4);
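    // The length is a smi, i.e. the untagged length shifted left by one, so
    // scaling it by times_2 multiplies the untagged length by four, which is
    // length * kPointerSize on ia32: a length of 3 is stored as smi 6, and
    // 6 * 2 == 12 == 3 * kPointerSize.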
    __ lea(scratch1_,
           Operand(map_result_, scratch1_, times_2, FixedArray::kHeaderSize));
    // Calculate location of the first key name.
    __ add(Operand(map_result_),
           Immediate(FixedArray::kHeaderSize +
                     DescriptorArray::kFirstIndex * kPointerSize));
    // Loop through all the keys in the descriptor array. If one of these is
    // the symbol valueOf the result is false.
    Label entry, loop;
    __ jmp(&entry);
    __ bind(&loop);
    __ mov(scratch2_, FieldOperand(map_result_, 0));
    __ cmp(scratch2_, Factory::value_of_symbol());
    __ j(equal, &false_result);
    __ add(Operand(map_result_), Immediate(kPointerSize));
    __ bind(&entry);
    __ cmp(map_result_, Operand(scratch1_));
    __ j(not_equal, &loop);

    // Reload map as register map_result_ was used as temporary above.
    __ mov(map_result_, FieldOperand(object_, HeapObject::kMapOffset));

    // If a valueOf property is not found on the object, check that its
    // prototype is the unmodified String prototype. If not, the result is
    // false.
    __ mov(scratch1_, FieldOperand(map_result_, Map::kPrototypeOffset));
    __ test(scratch1_, Immediate(kSmiTagMask));
    __ j(zero, &false_result);
    __ mov(scratch1_, FieldOperand(scratch1_, HeapObject::kMapOffset));
    __ mov(scratch2_, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
    __ mov(scratch2_,
           FieldOperand(scratch2_, GlobalObject::kGlobalContextOffset));
    __ cmp(scratch1_,
           CodeGenerator::ContextOperand(
               scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
    __ j(not_equal, &false_result);
    // Set the bit in the map to indicate that it has been checked safe for
    // default valueOf and set true result.
    __ or_(FieldOperand(map_result_, Map::kBitField2Offset),
           Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
    __ Set(map_result_, Immediate(1));
    __ jmp(exit_label());
    __ bind(&false_result);
    // Set false result.
    __ Set(map_result_, Immediate(0));
  }

 private:
  Register object_;
  Register map_result_;
  Register scratch1_;
  Register scratch2_;
};


void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();  // Pop the string wrapper.
  obj.ToRegister();
  ASSERT(obj.is_valid());
  if (FLAG_debug_code) {
    __ AbortIfSmi(obj.reg());
  }

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  Result map_result = allocator()->Allocate();
  ASSERT(map_result.is_valid());
  __ mov(map_result.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ test_b(FieldOperand(map_result.reg(), Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  destination()->true_target()->Branch(not_zero);

  // We need an additional two scratch registers for the deferred code.
  Result temp1 = allocator()->Allocate();
  ASSERT(temp1.is_valid());
  Result temp2 = allocator()->Allocate();
  ASSERT(temp2.is_valid());

  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
      new DeferredIsStringWrapperSafeForDefaultValueOf(
          obj.reg(), map_result.reg(), temp1.reg(), temp2.reg());
  deferred->Branch(zero);
  deferred->BindExit();
  __ test(map_result.reg(), Operand(map_result.reg()));
  obj.Unuse();
  map_result.Unuse();
  temp1.Unuse();
  temp2.Unuse();
  destination()->Split(not_equal);
}


void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (%_ClassOf(arg) === 'Function')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
  obj.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ mov(temp.reg(),
         FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  obj.Unuse();
  temp.Unuse();
  destination()->Split(not_zero);
}


void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  // Get the frame pointer for the calling frame.
  Result fp = allocator()->Allocate();
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  fp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Result fp = allocator_->Allocate();
  Result result = allocator_->Allocate();
  ASSERT(fp.is_valid() && result.is_valid());

  Label exit;

  // Get the number of formal parameters.
  __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(result.reg(),
         Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  result.set_type_info(TypeInfo::Smi());
  if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
  frame_->Push(&result);
}


void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  JumpTarget leave, null, function, non_function_constructor;
  Load(args->at(0));  // Load the object.
  Result obj = frame_->Pop();
  obj.ToRegister();
  frame_->Spill(obj.reg());

  // If the object is a smi, we return null.
  __ test(obj.reg(), Immediate(kSmiTagMask));
  null.Branch(zero);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
  null.Branch(below);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE);
  function.Branch(equal);

  // Check if the constructor in the map is a function.
  { Result tmp = allocator()->Allocate();
    __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
    __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
    non_function_constructor.Branch(not_equal);
  }

  // The map register now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->Push(&obj);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  frame_->Push(Factory::function_class_symbol());
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  frame_->Push(Factory::Object_symbol());
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  frame_->Push(Factory::null_value());

  // All done.
  leave.Bind();
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  frame_->Dup();
  Result object = frame_->Pop();
  object.ToRegister();
  ASSERT(object.is_valid());
  // if (object->IsSmi()) return object.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, taken);
  // It is a heap object - get map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // if (!object->IsJSValue()) return object.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
  leave.Branch(not_equal, not_taken);
  __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
  object.Unuse();
  frame_->SetElementAt(0, &temp);
  leave.Bind();
}


void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  Load(args->at(1));  // Load the value.
  Result value = frame_->Pop();
  Result object = frame_->Pop();
  value.ToRegister();
  object.ToRegister();

  // if (object->IsSmi()) return value.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, &value, taken);

  // It is a heap object - get its map.
  Result scratch = allocator_->Allocate();
  ASSERT(scratch.is_valid());
  // if (!object->IsJSValue()) return value.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
  leave.Branch(not_equal, &value, not_taken);

  // Store the value.
  __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  Result duplicate_value = allocator_->Allocate();
  ASSERT(duplicate_value.is_valid());
  __ mov(duplicate_value.reg(), value.reg());
  // The object register is also overwritten by the write barrier and
  // possibly aliased in the frame.
  frame_->Spill(object.reg());
  __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
                 scratch.reg());
  object.Unuse();
  scratch.Unuse();
  duplicate_value.Unuse();

  // Leave.
  leave.Bind(&value);
  frame_->Push(&value);
}


void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  Load(args->at(0));
  Result key = frame_->Pop();
  // Explicitly create a constant result.
  Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  Result result = frame_->CallStub(&stub, &key, &count);
  frame_->Push(&result);
}


void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Result right = frame_->Pop();
  Result left = frame_->Pop();
  right.ToRegister();
  left.ToRegister();
  __ cmp(right.reg(), Operand(left.reg()));
  right.Unuse();
  left.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  STATIC_ASSERT(kSmiTag == 0);  // EBP value is aligned, so it looks like a Smi.
  Result ebp_as_smi = allocator_->Allocate();
  ASSERT(ebp_as_smi.is_valid());
  __ mov(ebp_as_smi.reg(), Operand(ebp));
  frame_->Push(&ebp_as_smi);
}


void CodeGenerator::GenerateRandomHeapNumber(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  frame_->SpillAll();

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(0, ebx);
  __ CallCFunction(ExternalReference::random_uint32_function(), 0);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
  // This is implemented on both SSE2 and FPU.
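  // A sketch of the bit trick, assuming IEEE-754 doubles: 1.0 x 2^20 has bit
  // pattern 0x4130000000000000, so placing 32 random bits r in the low
  // mantissa word yields the value 2^20 + r/2^32, and subtracting 1.0 x 2^20
  // leaves r/2^32, a uniformly distributed double in [0, 1).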
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope fscope(SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, Operand(ebx));
    __ movd(xmm0, Operand(eax));
    __ cvtss2sd(xmm1, xmm1);
    __ pxor(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  __ mov(eax, edi);

  Result result = allocator_->Allocate(eax);
  frame_->Push(&result);
}


void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  SubStringStub stub;
  Result answer = frame_->CallStub(&stub, 3);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringCompareStub stub;
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
  ASSERT_EQ(4, args->length());

  // Load the arguments on the stack and call the stub.
  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));
  Load(args->at(3));
  RegExpExecStub stub;
  Result result = frame_->CallStub(&stub, 4);
  frame_->Push(&result);
}


void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
  // No stub. This code only occurs a few times in regexp.js.
  const int kMaxInlineLength = 100;
  ASSERT_EQ(3, args->length());
  Load(args->at(0));  // Size of array, smi.
  Load(args->at(1));  // "index" property value.
  Load(args->at(2));  // "input" property value.
  {
    VirtualFrame::SpilledScope spilled_scope;

    Label slowcase;
    Label done;
    __ mov(ebx, Operand(esp, kPointerSize * 2));
    __ test(ebx, Immediate(kSmiTagMask));
    __ j(not_zero, &slowcase);
    __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
    __ j(above, &slowcase);
    // Smi-tagging is equivalent to multiplying by 2.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
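    // The length in ebx is a smi, i.e. the untagged length times 2, so
    // scaling it by times_half_pointer_size (a factor of 2) yields
    // length * kPointerSize, the byte size of the elements.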
    // Allocate RegExpResult followed by FixedArray with size in ebx.
    // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
    // Elements:  [Map][Length][..elements..]
    __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                          times_half_pointer_size,
                          ebx,  // In: Number of elements (times 2, being a smi)
                          eax,  // Out: Start of allocation (tagged).
                          ecx,  // Out: End of allocation.
                          edx,  // Scratch register
                          &slowcase,
                          TAG_OBJECT);
    // eax: Start of allocated area, object-tagged.

    // Set JSArray map to global.regexp_result_map().
    // Set empty properties FixedArray.
    // Set elements to point to FixedArray allocated right after the JSArray.
    // Interleave operations for better latency.
    __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
    __ mov(ecx, Immediate(Factory::empty_fixed_array()));
    __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
    __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
    __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
    __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
    __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);

    // Set input, index and length fields from arguments.
    __ pop(FieldOperand(eax, JSRegExpResult::kInputOffset));
    __ pop(FieldOperand(eax, JSRegExpResult::kIndexOffset));
    __ pop(ecx);
    __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);

    // Fill out the elements FixedArray.
    // eax: JSArray.
    // ebx: FixedArray.
    // ecx: Number of elements in array, as smi.

    // Set map.
    __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    // Set length.
    __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
    // Fill the contents of the fixed array with the hole value.
    __ SmiUntag(ecx);
    __ mov(edx, Immediate(Factory::the_hole_value()));
    __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
    // Fill fixed array elements with hole.
    // eax: JSArray.
    // ecx: Number of elements to fill.
    // ebx: Start of elements in FixedArray.
    // edx: the hole.
    Label loop;
    __ test(ecx, Operand(ecx));
    __ bind(&loop);
    __ j(less_equal, &done);  // Jump if ecx is negative or zero.
    __ sub(Operand(ecx), Immediate(1));
    __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
    __ jmp(&loop);

    __ bind(&slowcase);
    __ CallRuntime(Runtime::kRegExpConstructResult, 3);

    __ bind(&done);
  }
  frame_->Forget(3);
  frame_->Push(eax);
}


class DeferredSearchCache: public DeferredCode {
 public:
  DeferredSearchCache(Register dst, Register cache, Register key)
      : dst_(dst), cache_(cache), key_(key) {
    set_comment("[ DeferredSearchCache");
  }

  virtual void Generate();

 private:
  Register dst_;    // on invocation Smi index of finger, on exit
                    // holds value being looked up.
  Register cache_;  // instance of JSFunctionResultCache.
  Register key_;    // key being looked up.
};


void DeferredSearchCache::Generate() {
  Label first_loop, search_further, second_loop, cache_miss;

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
  Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);
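
  // Cache layout sketch (see JSFunctionResultCache): a FixedArray whose
  // header fields hold roughly the factory function, the finger (a smi
  // index of the most recently hit entry) and the current cache size,
  // followed by (key, value) pairs of kEntrySize starting at kEntriesIndex.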

  // Check the cache from finger to start of the cache.
  __ bind(&first_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
  __ j(less, &search_further);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &first_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&search_further);

  // Check the cache from end of cache up to finger.
  __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));

  __ bind(&second_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  // Consider prefetching into some reg.
  __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
  __ j(less_equal, &cache_miss);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &second_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&cache_miss);
  __ push(cache_);  // store a reference to cache
  __ push(key_);  // store a key
  __ push(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ push(key_);
  // On ia32 function must be in edi.
  __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
  ParameterCount expected(1);
  __ InvokeFunction(edi, expected, CALL_FUNCTION);

  // Find a place to put new cached value into.
  Label add_new_entry, update_cache;
  __ mov(ecx, Operand(esp, kPointerSize));  // restore the cache
  // Possible optimization: the cache size is constant for a given cache,
  // so technically we could use a constant here. However, since we only
  // reach this code on a cache miss, the optimization would hardly matter.

  // Check if we could add new entry to cache.
  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ j(greater, &add_new_entry);

  // Check if we could evict entry after finger.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(kEntrySizeSmi));
  __ cmp(ebx, Operand(edx));
  __ j(greater, &update_cache);

  // Need to wrap over the cache.
  __ mov(edx, Immediate(kEntriesIndexSmi));
  __ jmp(&update_cache);

  __ bind(&add_new_entry);
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);

  // Update the cache itself.
  // edx holds the index.
  __ bind(&update_cache);
  __ pop(ebx);  // restore the key
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
  // Store key.
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  // Store value.
  __ pop(ecx);  // restore the cache.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(Smi::FromInt(1)));
  __ mov(ebx, eax);
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  if (!dst_.is(eax)) {
    __ mov(dst_, eax);
  }
}


void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    frame_->Push(Factory::undefined_value());
    return;
  }

  Load(args->at(1));
  Result key = frame_->Pop();
  key.ToRegister();

  Result cache = allocator()->Allocate();
  ASSERT(cache.is_valid());
  __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
  __ mov(cache.reg(),
         ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
                                                          cache.reg(),
                                                          key.reg());

  // tmp.reg() now holds finger offset as a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp.reg(), FieldOperand(cache.reg(),
                                 JSFunctionResultCache::kFingerOffset));
  __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
  deferred->Branch(not_equal);

  __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));

  deferred->BindExit();
  frame_->Push(&tmp);
}


void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  Load(args->at(0));
  NumberToStringStub stub;
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


class DeferredSwapElements: public DeferredCode {
 public:
  DeferredSwapElements(Register object, Register index1, Register index2)
      : object_(object), index1_(index1), index2_(index2) {
    set_comment("[ DeferredSwapElements");
  }

  virtual void Generate();

 private:
  Register object_, index1_, index2_;
};


void DeferredSwapElements::Generate() {
  __ push(object_);
  __ push(index1_);
  __ push(index2_);
  __ CallRuntime(Runtime::kSwapElements, 3);
}


void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
  // Note: this code assumes that the indices passed are within the
  // elements' bounds and refer to valid (non-hole) values.
  Comment cmnt(masm_, "[ GenerateSwapElements");

  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  Result index2 = frame_->Pop();
  index2.ToRegister();

  Result index1 = frame_->Pop();
  index1.ToRegister();

  Result object = frame_->Pop();
  object.ToRegister();

  Result tmp1 = allocator()->Allocate();
  tmp1.ToRegister();
  Result tmp2 = allocator()->Allocate();
  tmp2.ToRegister();

  frame_->Spill(object.reg());
  frame_->Spill(index1.reg());
  frame_->Spill(index2.reg());

  DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
                                                            index1.reg(),
                                                            index2.reg());

  // Fetch the map and check if array is in fast case.
  // Check that object doesn't require security checks and
  // has no indexed interceptor.
  __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
  deferred->Branch(below);
  __ test_b(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
            KeyedLoadIC::kSlowCaseBitFieldMask);
  deferred->Branch(not_zero);

  // Check the object's elements are in fast case and writable.
  __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
  __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  deferred->Branch(not_equal);

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  // Check that both indices are smis.
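  // Or-ing the two indices lets a single test cover both: the smi tag bit
  // of (index1 | index2) is set iff at least one of them is a heap object.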
  __ mov(tmp2.reg(), index1.reg());
  __ or_(tmp2.reg(), Operand(index2.reg()));
  __ test(tmp2.reg(), Immediate(kSmiTagMask));
  deferred->Branch(not_zero);

  // Bring addresses into index1 and index2.
  __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
  __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));

  // Swap elements.
  __ mov(object.reg(), Operand(index1.reg(), 0));
  __ mov(tmp2.reg(), Operand(index2.reg(), 0));
  __ mov(Operand(index2.reg(), 0), object.reg());
  __ mov(Operand(index1.reg(), 0), tmp2.reg());

  Label done;
  __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
  // Possible optimization: do a check that both values are Smis
  // (or them and test against Smi mask.)

  __ mov(tmp2.reg(), tmp1.reg());
  __ RecordWriteHelper(tmp2.reg(), index1.reg(), object.reg());
  __ RecordWriteHelper(tmp1.reg(), index2.reg(), object.reg());
  __ bind(&done);

  deferred->BindExit();
  frame_->Push(Factory::undefined_value());
}


void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateCallFunction");

  ASSERT(args->length() >= 2);

  int n_args = args->length() - 2;  // for receiver and function.
  Load(args->at(0));  // receiver
  for (int i = 0; i < n_args; i++) {
    Load(args->at(i + 1));
  }
  Load(args->at(n_args + 1));  // function
  Result result = frame_->CallJSFunction(n_args);
  frame_->Push(&result);
}


// Generates the Math.pow method. Only handles special cases and
// branches to the runtime system for everything else. Please note
// that this function assumes that the callsite has executed ToNumber
// on both arguments.
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));
  if (!CpuFeatures::IsSupported(SSE2)) {
    Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->Push(&res);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    Label allocate_return;
    // Load the two operands while leaving the values on the frame.
    frame()->Dup();
    Result exponent = frame()->Pop();
    exponent.ToRegister();
    frame()->Spill(exponent.reg());
    frame()->PushElementAt(1);
    Result base = frame()->Pop();
    base.ToRegister();
    frame()->Spill(base.reg());

    Result answer = allocator()->Allocate();
    ASSERT(answer.is_valid());
    ASSERT(!exponent.reg().is(base.reg()));
    JumpTarget call_runtime;

    // Save 1 in xmm3 - we need this several times later on.
    __ mov(answer.reg(), Immediate(1));
    __ cvtsi2sd(xmm3, Operand(answer.reg()));

    Label exponent_nonsmi;
    Label base_nonsmi;
    // If the exponent is a heap number go to that specific case.
    __ test(exponent.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &exponent_nonsmi);
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_nonsmi);

    // Optimized version when y is an integer.
    Label powi;
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&powi);
    // exponent is smi and base is a heapnumber.
    __ bind(&base_nonsmi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);

    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // Optimized version of pow if y is an integer.
    __ bind(&powi);
    __ SmiUntag(exponent.reg());

    // Save exponent in base as we need to check if exponent is negative later.
    // We know that base and exponent are in different registers.
    __ mov(base.reg(), exponent.reg());

    // Get absolute value of exponent.
    Label no_neg;
    __ cmp(exponent.reg(), 0);
    __ j(greater_equal, &no_neg);
    __ neg(exponent.reg());
    __ bind(&no_neg);

    // Load xmm1 with 1.
    __ movsd(xmm1, xmm3);
    Label while_true;
    Label no_multiply;

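    // Exponentiation by squaring: xmm0 holds base^(2^i) on iteration i and
    // the exponent bits are consumed least-significant first; whenever a
    // shifted-out bit is 1 (carry set), the current power is multiplied into
    // the accumulator xmm1. E.g. for exponent 5 (binary 101) the loop
    // computes base^1 * base^4.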
    __ bind(&while_true);
    __ shr(exponent.reg(), 1);
    __ j(not_carry, &no_multiply);
    __ mulsd(xmm1, xmm0);
    __ bind(&no_multiply);
    __ test(exponent.reg(), Operand(exponent.reg()));
    __ mulsd(xmm0, xmm0);
    __ j(not_zero, &while_true);

    // base.reg() still holds the original exponent - if it was negative,
    // return 1/result.
    __ test(base.reg(), Operand(base.reg()));
    __ j(positive, &allocate_return);
    // Special case if xmm1 has reached infinity.
    __ mov(answer.reg(), Immediate(0x7FB00000));
    __ movd(xmm0, Operand(answer.reg()));
    __ cvtss2sd(xmm0, xmm0);
    __ ucomisd(xmm0, xmm1);
    call_runtime.Branch(equal);
    __ divsd(xmm3, xmm1);
    __ movsd(xmm1, xmm3);
    __ jmp(&allocate_return);

    // exponent (or both) is a heapnumber - no matter what we should now work
    // on doubles.
    __ bind(&exponent_nonsmi);
    __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);
    __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
    // Test if exponent is nan.
    __ ucomisd(xmm1, xmm1);
    call_runtime.Branch(parity_even);

    Label base_not_smi;
    Label handle_special_cases;
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_not_smi);
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&handle_special_cases);
    __ bind(&base_not_smi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);
    __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
    __ and_(answer.reg(), HeapNumber::kExponentMask);
    __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
    // base is NaN or +/-Infinity
    call_runtime.Branch(greater_equal);
    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // base is in xmm0 and exponent is in xmm1.
    __ bind(&handle_special_cases);
    Label not_minus_half;
    // Test for -0.5.
    // Load xmm2 with -0.5.
    __ mov(answer.reg(), Immediate(0xBF000000));
    __ movd(xmm2, Operand(answer.reg()));
    __ cvtss2sd(xmm2, xmm2);
    // xmm2 now has -0.5.
    __ ucomisd(xmm2, xmm1);
    __ j(not_equal, &not_minus_half);

    // Calculates the reciprocal of the square root.
    // Note that 1/sqrt(x) = sqrt(1/x).
    __ divsd(xmm3, xmm0);
    __ movsd(xmm1, xmm3);
    __ sqrtsd(xmm1, xmm1);
    __ jmp(&allocate_return);

    // Test for 0.5.
    __ bind(&not_minus_half);
    // Load xmm2 with 0.5.
    // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
    __ addsd(xmm2, xmm3);
    // xmm2 now has 0.5.
    __ ucomisd(xmm2, xmm1);
    call_runtime.Branch(not_equal);
    // Calculates square root.
    __ movsd(xmm1, xmm0);
    __ sqrtsd(xmm1, xmm1);

    JumpTarget done;
    Label failure, success;
    __ bind(&allocate_return);
    // Make a copy of the frame to enable us to handle allocation
    // failure after the JumpTarget jump.
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(answer.reg(), exponent.reg(),
                          base.reg(), &failure);
    __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
    // Remove the two original values from the frame - we only need those
    // in the case where we branch to runtime.
    frame()->Drop(2);
    exponent.Unuse();
    base.Unuse();
    done.Jump(&answer);
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    // If we experience an allocation failure we branch to runtime.
    __ bind(&failure);
    call_runtime.Bind();
    answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);

    done.Bind(&answer);
    frame()->Push(&answer);
  }
}


void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::SIN);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::COS);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


// Generates the Math.sqrt method. Please note - this function assumes that
// the callsite has executed ToNumber on the argument.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));

  if (!CpuFeatures::IsSupported(SSE2)) {
    Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
    frame()->Push(&result);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    // Leave original value on the frame if we need to call runtime.
    frame()->Dup();
    Result result = frame()->Pop();
    result.ToRegister();
    frame()->Spill(result.reg());
    Label runtime;
    Label non_smi;
    Label load_done;
    JumpTarget end;

    __ test(result.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &non_smi);
    __ SmiUntag(result.reg());
    __ cvtsi2sd(xmm0, Operand(result.reg()));
    __ jmp(&load_done);
    __ bind(&non_smi);
    __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    __ j(not_equal, &runtime);
    __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));

    __ bind(&load_done);
    __ sqrtsd(xmm0, xmm0);
    // A copy of the virtual frame to allow us to go to runtime after the
    // JumpTarget jump.
    Result scratch = allocator()->Allocate();
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);

    __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
    frame()->Drop(1);
    scratch.Unuse();
    end.Jump(&result);
    // We only branch to runtime if we have an allocation error.
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&runtime);
    result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);

    end.Bind(&result);
    frame()->Push(&result);
  }
}


void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());
  Load(args->at(0));
  Load(args->at(1));
  Result right_res = frame_->Pop();
  Result left_res = frame_->Pop();
  right_res.ToRegister();
  left_res.ToRegister();
  Result tmp_res = allocator()->Allocate();
  ASSERT(tmp_res.is_valid());
  Register right = right_res.reg();
  Register left = left_res.reg();
  Register tmp = tmp_res.reg();
  right_res.Unuse();
  left_res.Unuse();
  tmp_res.Unuse();
  __ cmp(left, Operand(right));
  destination()->true_target()->Branch(equal);
  // Fail if either is a non-HeapObject.
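  // And-ing the two values makes one test cover both operands: the smi tag
  // bit of (left & right) is set only if both are heap objects.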
  __ mov(tmp, left);
  __ and_(Operand(tmp), right);
  __ test(Operand(tmp), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  __ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
  destination()->false_target()->Branch(not_equal);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  destination()->false_target()->Branch(not_equal);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  destination()->Split(equal);
}


void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
  ASSERT(!in_safe_int32_mode());
  if (CheckForInlineRuntimeCall(node)) {
    return;
  }

  ZoneList<Expression*>* args = node->arguments();
  Comment cmnt(masm_, "[ CallRuntime");
  Runtime::Function* function = node->function();

  if (function == NULL) {
    // Push the builtins object found in the current global object.
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), GlobalObject());
    __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
    frame_->Push(&temp);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  if (function == NULL) {
    // Call the JS runtime function.
    frame_->Push(node->name());
    Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
                                       arg_count,
                                       loop_nesting_);
    frame_->RestoreContextRegister();
    frame_->Push(&answer);
  } else {
    // Call the C runtime function.
    Result answer = frame_->CallRuntime(function, arg_count);
    frame_->Push(&answer);
  }
}


void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
    // Swap the true and false targets but keep the same actual label
    // as the fall through.
    destination()->Invert();
    LoadCondition(node->expression(), destination(), true);
    // Swap the labels back.
    destination()->Invert();

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    if (property != NULL) {
      Load(property->obj());
      Load(property->key());
      Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
      frame_->Push(&answer);
      return;
    }

    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (variable != NULL) {
      Slot* slot = variable->slot();
      if (variable->is_global()) {
        LoadGlobal();
        frame_->Push(variable->name());
        Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
                                              CALL_FUNCTION, 2);
        frame_->Push(&answer);
        return;

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // Call the runtime to look up the context holding the named
        // variable. Sync the virtual frame eagerly so we can push the
        // arguments directly into place.
        frame_->SyncRange(0, frame_->element_count() - 1);
        frame_->EmitPush(esi);
        frame_->EmitPush(Immediate(variable->name()));
        Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
        ASSERT(context.is_register());
        frame_->EmitPush(context.reg());
        context.Unuse();
        frame_->EmitPush(Immediate(variable->name()));
        Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
                                              CALL_FUNCTION, 2);
        frame_->Push(&answer);
        return;
      }

      // Default: Result of deleting non-global, not dynamically
      // introduced variables is false.
      frame_->Push(Factory::false_value());

    } else {
      // Default: Result of deleting expressions is true.
      Load(node->expression());  // may have side-effects
      frame_->SetElementAt(0, Factory::true_value());
    }

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->Push(&answer);

  } else if (op == Token::VOID) {
    Expression* expression = node->expression();
    if (expression && expression->AsLiteral() && (
        expression->AsLiteral()->IsTrue() ||
        expression->AsLiteral()->IsFalse() ||
        expression->AsLiteral()->handle()->IsNumber() ||
        expression->AsLiteral()->handle()->IsString() ||
        expression->AsLiteral()->handle()->IsJSRegExp() ||
        expression->AsLiteral()->IsNull())) {
      // Omit evaluating the value of the primitive literal.
      // It will be discarded anyway, and can have no side effect.
      frame_->Push(Factory::undefined_value());
    } else {
      Load(node->expression());
      frame_->SetElementAt(0, Factory::undefined_value());
    }

  } else {
    if (in_safe_int32_mode()) {
      Visit(node->expression());
      Result value = frame_->Pop();
      ASSERT(value.is_untagged_int32());
      // Registers containing an int32 value are not multiply used.
      ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
      value.ToRegister();
      switch (op) {
        case Token::SUB: {
          __ neg(value.reg());
          if (node->no_negative_zero()) {
            // -MIN_INT is MIN_INT with the overflow flag set.
            unsafe_bailout_->Branch(overflow);
          } else {
            // MIN_INT and 0 both have bad negations. They both have 31 zeros.
            __ test(value.reg(), Immediate(0x7FFFFFFF));
            unsafe_bailout_->Branch(zero);
          }
          break;
        }
        case Token::BIT_NOT: {
          __ not_(value.reg());
          break;
        }
        case Token::ADD: {
          // Unary plus has no effect on int32 values.
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
      frame_->Push(&value);
    } else {
      Load(node->expression());
      bool can_overwrite =
          (node->expression()->AsBinaryOperation() != NULL &&
           node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
      UnaryOverwriteMode overwrite =
          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
      bool no_negative_zero = node->expression()->no_negative_zero();
      switch (op) {
        case Token::NOT:
        case Token::DELETE:
        case Token::TYPEOF:
          UNREACHABLE();  // handled above
          break;

        case Token::SUB: {
          GenericUnaryOpStub stub(
              Token::SUB,
              overwrite,
              no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
          Result operand = frame_->Pop();
          Result answer = frame_->CallStub(&stub, &operand);
          answer.set_type_info(TypeInfo::Number());
          frame_->Push(&answer);
          break;
        }
        case Token::BIT_NOT: {
          // Smi check.
          JumpTarget smi_label;
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          if (operand_info.IsSmi()) {
            if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
            frame_->Spill(operand.reg());
            // Set smi tag bit. It will be reset by the not operation.
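            // For a smi with value n the word holds 2n; setting the tag bit
            // gives 2n+1, and ~(2n+1) == -2n-2 == 2*(-n-1), which is exactly
            // the smi encoding of ~n, so no untag/retag is needed.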
            __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
            __ not_(operand.reg());
            Result answer = operand;
            answer.set_type_info(TypeInfo::Smi());
            frame_->Push(&answer);
          } else {
            __ test(operand.reg(), Immediate(kSmiTagMask));
            smi_label.Branch(zero, &operand, taken);

            GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
            Result answer = frame_->CallStub(&stub, &operand);
            continue_label.Jump(&answer);

            smi_label.Bind(&answer);
            answer.ToRegister();
            frame_->Spill(answer.reg());
            // Set smi tag bit. It will be reset by the not operation.
            __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
            __ not_(answer.reg());

            continue_label.Bind(&answer);
            answer.set_type_info(TypeInfo::Integer32());
            frame_->Push(&answer);
          }
          break;
        }
        case Token::ADD: {
          // Smi check.
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          __ test(operand.reg(), Immediate(kSmiTagMask));
          continue_label.Branch(zero, &operand, taken);

          frame_->Push(&operand);
          Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
                                                CALL_FUNCTION, 1);

          continue_label.Bind(&answer);
          if (operand_info.IsSmi()) {
            answer.set_type_info(TypeInfo::Smi());
          } else if (operand_info.IsInteger32()) {
            answer.set_type_info(TypeInfo::Integer32());
          } else {
            answer.set_type_info(TypeInfo::Number());
          }
          frame_->Push(&answer);
          break;
        }
        default:
          UNREACHABLE();
      }
    }
  }
}


// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation, call
// into the runtime to convert the argument to a number, and call the
// specialized add or subtract stub. The result is left in dst.
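// The fast path increments without untagging: since a smi stores 2n for
// value n, adding the raw bits of Smi::FromInt(1) adds 2 to the word, i.e.
// +1 on the untagged value; this deferred code runs only when that
// optimistic operation overflowed or the input was not a smi.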
8217class DeferredPrefixCountOperation: public DeferredCode {
8218 public:
Steve Block6ded16b2010-05-10 14:33:55 +01008219 DeferredPrefixCountOperation(Register dst,
8220 bool is_increment,
8221 TypeInfo input_type)
8222 : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008223 set_comment("[ DeferredCountOperation");
8224 }
8225
8226 virtual void Generate();
8227
8228 private:
8229 Register dst_;
8230 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01008231 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008232};
8233
8234
8235void DeferredPrefixCountOperation::Generate() {
8236 // Undo the optimistic smi operation.
8237 if (is_increment_) {
8238 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
8239 } else {
8240 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
8241 }
Steve Block6ded16b2010-05-10 14:33:55 +01008242 Register left;
8243 if (input_type_.IsNumber()) {
8244 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008245 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008246 __ push(dst_);
8247 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
8248 left = eax;
Steve Blocka7e24c12009-10-30 11:49:00 +00008249 }
Steve Block6ded16b2010-05-10 14:33:55 +01008250
8251 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
8252 NO_OVERWRITE,
8253 NO_GENERIC_BINARY_FLAGS,
8254 TypeInfo::Number());
8255 stub.GenerateCall(masm_, left, Smi::FromInt(1));
8256
Steve Blocka7e24c12009-10-30 11:49:00 +00008257 if (!dst_.is(eax)) __ mov(dst_, eax);
8258}
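
// A note on the undo above: the optimistic code added the tagged constant
// Smi::FromInt(1) (the raw word 2 under the assumed smi encoding) directly
// to the tagged input, and two's-complement add/sub are exact inverses even
// when they overflow, so subtracting the same constant always recovers the
// original value.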
8259
8260
8261// The value in dst was optimistically incremented or decremented. The
8262// result overflowed or was not smi tagged. Undo the operation and call
8263// into the runtime to convert the argument to a number. Update the
8264// original value in old. Call the specialized add or subtract stub.
8265// The result is left in dst.
8266class DeferredPostfixCountOperation: public DeferredCode {
8267 public:
Steve Block6ded16b2010-05-10 14:33:55 +01008268 DeferredPostfixCountOperation(Register dst,
8269 Register old,
8270 bool is_increment,
8271 TypeInfo input_type)
8272 : dst_(dst),
8273 old_(old),
8274 is_increment_(is_increment),
8275 input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008276 set_comment("[ DeferredCountOperation");
8277 }
8278
8279 virtual void Generate();
8280
8281 private:
8282 Register dst_;
8283 Register old_;
8284 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01008285 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008286};
8287
8288
8289void DeferredPostfixCountOperation::Generate() {
8290 // Undo the optimistic smi operation.
8291 if (is_increment_) {
8292 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
8293 } else {
8294 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
8295 }
Steve Block6ded16b2010-05-10 14:33:55 +01008296 Register left;
8297 if (input_type_.IsNumber()) {
8298 __ push(dst_); // Save the input to use as the old value.
8299 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008300 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008301 __ push(dst_);
8302 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
8303 __ push(eax); // Save the result of ToNumber to use as the old value.
8304 left = eax;
Steve Blocka7e24c12009-10-30 11:49:00 +00008305 }
Steve Block6ded16b2010-05-10 14:33:55 +01008306
8307 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
8308 NO_OVERWRITE,
8309 NO_GENERIC_BINARY_FLAGS,
8310 TypeInfo::Number());
8311 stub.GenerateCall(masm_, left, Smi::FromInt(1));
8312
Steve Blocka7e24c12009-10-30 11:49:00 +00008313 if (!dst_.is(eax)) __ mov(dst_, eax);
8314 __ pop(old_);
8315}
8316
8317
8318void CodeGenerator::VisitCountOperation(CountOperation* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008319 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008320 Comment cmnt(masm_, "[ CountOperation");
8321
8322 bool is_postfix = node->is_postfix();
8323 bool is_increment = node->op() == Token::INC;
8324
8325 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
8326 bool is_const = (var != NULL && var->mode() == Variable::CONST);
8327
8328 // Postfix operations need a stack slot under the reference to hold
8329 // the old value while the new value is being stored. This is so that
8330 // in the case that storing the new value requires a call, the old
8331 // value will be in the frame to be spilled.
8332 if (is_postfix) frame_->Push(Smi::FromInt(0));
8333
Leon Clarked91b9f72010-01-27 17:25:45 +00008334 // A constant reference is not saved to, so a constant reference is not a
8335 // compound assignment reference.
8336 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00008337 if (target.is_illegal()) {
8338 // Spoof the virtual frame to have the expected height (one higher
8339 // than on entry).
8340 if (!is_postfix) frame_->Push(Smi::FromInt(0));
8341 return;
8342 }
Steve Blockd0582a62009-12-15 09:54:21 +00008343 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00008344
8345 Result new_value = frame_->Pop();
8346 new_value.ToRegister();
8347
8348 Result old_value; // Only allocated in the postfix case.
8349 if (is_postfix) {
8350 // Allocate a temporary to preserve the old value.
8351 old_value = allocator_->Allocate();
8352 ASSERT(old_value.is_valid());
8353 __ mov(old_value.reg(), new_value.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01008354
8355 // The return value for postfix operations is ToNumber(input).
8356 // Keep more precise type info if the input is some kind of
8357 // number already. If the input is not a number we have to wait
8358 // for the deferred code to convert it.
8359 if (new_value.type_info().IsNumber()) {
8360 old_value.set_type_info(new_value.type_info());
8361 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008362 }
Steve Block6ded16b2010-05-10 14:33:55 +01008363
Steve Blocka7e24c12009-10-30 11:49:00 +00008364 // Ensure the new value is writable.
8365 frame_->Spill(new_value.reg());
8366
Steve Block6ded16b2010-05-10 14:33:55 +01008367 Result tmp;
8368 if (new_value.is_smi()) {
8369 if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00008370 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01008371 // We don't know statically if the input is a smi.
8372 // In order to combine the overflow and the smi tag check, we need
8373 // to be able to allocate a byte register. We attempt to do so
8374 // without spilling. If we fail, we will generate separate overflow
8375 // and smi tag checks.
8376 // We allocate and clear a temporary byte register before performing
8377 // the count operation since clearing the register using xor will clear
8378 // the overflow flag.
8379 tmp = allocator_->AllocateByteRegisterWithoutSpilling();
8380 if (tmp.is_valid()) {
8381 __ Set(tmp.reg(), Immediate(0));
8382 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008383 }
8384
8385 if (is_increment) {
8386 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
8387 } else {
8388 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
8389 }
8390
Steve Block6ded16b2010-05-10 14:33:55 +01008391 DeferredCode* deferred = NULL;
8392 if (is_postfix) {
8393 deferred = new DeferredPostfixCountOperation(new_value.reg(),
8394 old_value.reg(),
8395 is_increment,
8396 new_value.type_info());
8397 } else {
8398 deferred = new DeferredPrefixCountOperation(new_value.reg(),
8399 is_increment,
8400 new_value.type_info());
8401 }
8402
8403 if (new_value.is_smi()) {
8404 // If the input is a smi, we only need to check for overflow.
8405 deferred->Branch(overflow);
8406 } else {
8407 // If the count operation didn't overflow and the result is a valid
8408 // smi, we're done. Otherwise, we jump to the deferred slow-case
8409 // code.
Steve Blocka7e24c12009-10-30 11:49:00 +00008410 // We combine the overflow and the smi tag check if we could
8411 // successfully allocate a temporary byte register.
Steve Block6ded16b2010-05-10 14:33:55 +01008412 if (tmp.is_valid()) {
8413 __ setcc(overflow, tmp.reg());
8414 __ or_(Operand(tmp.reg()), new_value.reg());
8415 __ test(tmp.reg(), Immediate(kSmiTagMask));
8416 tmp.Unuse();
8417 deferred->Branch(not_zero);
8418 } else {
8419 // Otherwise we test separately for overflow and smi tag.
8420 deferred->Branch(overflow);
8421 __ test(new_value.reg(), Immediate(kSmiTagMask));
8422 deferred->Branch(not_zero);
8423 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008424 }
8425 deferred->BindExit();
8426
Steve Block6ded16b2010-05-10 14:33:55 +01008427 // Postfix count operations return their input converted to
8428 // number. The case when the input is already a number is covered
8429 // above in the allocation code for old_value.
8430 if (is_postfix && !new_value.type_info().IsNumber()) {
8431 old_value.set_type_info(TypeInfo::Number());
8432 }
8433
8434 // The result of ++ or -- is an Integer32 if the
8435 // input is a smi. Otherwise it is a number.
8436 if (new_value.is_smi()) {
8437 new_value.set_type_info(TypeInfo::Integer32());
8438 } else {
8439 new_value.set_type_info(TypeInfo::Number());
8440 }
8441
Steve Blocka7e24c12009-10-30 11:49:00 +00008442 // Postfix: store the old value in the allocated slot under the
8443 // reference.
8444 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
8445
8446 frame_->Push(&new_value);
8447 // Non-constant: update the reference.
8448 if (!is_const) target.SetValue(NOT_CONST_INIT);
8449 }
8450
8451 // Postfix: drop the new value and use the old.
8452 if (is_postfix) frame_->Drop();
8453}
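
// A sketch of the combined overflow-and-smi-tag check above, assuming the
// smi tag is the low bit (kSmiTagMask == 1); setcc needs a byte register,
// which is why tmp was allocated as one:
//   setcc(overflow, tmp)    ; tmp = 1 if the inc/dec overflowed, else 0
//   or(tmp, new_value)      ; low bit of tmp |= tag bit of the result
//   test(tmp, kSmiTagMask)  ; not_zero iff overflow or result not a smi
// so a single conditional branch covers both failure modes.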
8454
8455
Steve Block6ded16b2010-05-10 14:33:55 +01008456void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008457 Token::Value op = node->op();
Steve Block6ded16b2010-05-10 14:33:55 +01008458 Comment cmnt(masm_, "[ Int32BinaryOperation");
8459 ASSERT(in_safe_int32_mode());
8460 ASSERT(safe_int32_mode_enabled());
8461 ASSERT(FLAG_safe_int32_compiler);
Steve Blocka7e24c12009-10-30 11:49:00 +00008462
Steve Block6ded16b2010-05-10 14:33:55 +01008463 if (op == Token::COMMA) {
8464 // Discard left value.
8465 frame_->Nip(1);
8466 return;
8467 }
8468
8469 Result right = frame_->Pop();
8470 Result left = frame_->Pop();
8471
8472 ASSERT(right.is_untagged_int32());
8473 ASSERT(left.is_untagged_int32());
8474 // Registers containing an int32 value are not multiply used.
8475 ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
8476 ASSERT(!right.is_register() || !frame_->is_used(right.reg()));
8477
8478 switch (op) {
8479 case Token::COMMA:
8480 case Token::OR:
8481 case Token::AND:
8482 UNREACHABLE();
8483 break;
8484 case Token::BIT_OR:
8485 case Token::BIT_XOR:
8486 case Token::BIT_AND:
8487 if (left.is_constant() || right.is_constant()) {
8488 int32_t value; // Put constant in value, non-constant in left.
8489 // Constants are known to be int32 values, from static analysis,
8490 // or else will be converted to int32 by implicit ECMA [[ToInt32]].
8491 if (left.is_constant()) {
8492 ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
8493 value = NumberToInt32(*left.handle());
8494 left = right;
8495 } else {
8496 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
8497 value = NumberToInt32(*right.handle());
8498 }
8499
8500 left.ToRegister();
8501 if (op == Token::BIT_OR) {
8502 __ or_(Operand(left.reg()), Immediate(value));
8503 } else if (op == Token::BIT_XOR) {
8504 __ xor_(Operand(left.reg()), Immediate(value));
8505 } else {
8506 ASSERT(op == Token::BIT_AND);
8507 __ and_(Operand(left.reg()), Immediate(value));
8508 }
8509 } else {
8510 ASSERT(left.is_register());
8511 ASSERT(right.is_register());
8512 if (op == Token::BIT_OR) {
8513 __ or_(left.reg(), Operand(right.reg()));
8514 } else if (op == Token::BIT_XOR) {
8515 __ xor_(left.reg(), Operand(right.reg()));
8516 } else {
8517 ASSERT(op == Token::BIT_AND);
8518 __ and_(left.reg(), Operand(right.reg()));
8519 }
8520 }
8521 frame_->Push(&left);
8522 right.Unuse();
8523 break;
8524 case Token::SAR:
8525 case Token::SHL:
8526 case Token::SHR: {
8527 bool test_shr_overflow = false;
8528 left.ToRegister();
8529 if (right.is_constant()) {
8530 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
8531 int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
8532 if (op == Token::SAR) {
8533 __ sar(left.reg(), shift_amount);
8534 } else if (op == Token::SHL) {
8535 __ shl(left.reg(), shift_amount);
8536 } else {
8537 ASSERT(op == Token::SHR);
8538 __ shr(left.reg(), shift_amount);
8539 if (shift_amount == 0) test_shr_overflow = true;
8540 }
8541 } else {
8542 // Move right to ecx.
8543 if (left.is_register() && left.reg().is(ecx)) {
8544 right.ToRegister();
8545 __ xchg(left.reg(), right.reg());
8546 left = right; // Old left is dead; left and right now alias one register.
8547 } else {
8548 right.ToRegister(ecx);
8549 left.ToRegister();
8550 }
8551 if (op == Token::SAR) {
8552 __ sar_cl(left.reg());
8553 } else if (op == Token::SHL) {
8554 __ shl_cl(left.reg());
8555 } else {
8556 ASSERT(op == Token::SHR);
8557 __ shr_cl(left.reg());
8558 test_shr_overflow = true;
8559 }
8560 }
8561 {
8562 Register left_reg = left.reg();
8563 frame_->Push(&left);
8564 right.Unuse();
8565 if (test_shr_overflow && !node->to_int32()) {
8566 // Uint32 results with top bit set are not Int32 values.
8567 // If they will be forced to Int32, skip the test.
8568 // Test is needed because shr with shift amount 0 does not set flags.
8569 __ test(left_reg, Operand(left_reg));
8570 unsafe_bailout_->Branch(sign);
8571 }
8572 }
8573 break;
8574 }
8575 case Token::ADD:
8576 case Token::SUB:
8577 case Token::MUL:
8578 if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
8579 int32_t value; // Put constant in value, non-constant in left.
8580 if (right.is_constant()) {
8581 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
8582 value = NumberToInt32(*right.handle());
8583 } else {
8584 ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
8585 value = NumberToInt32(*left.handle());
8586 left = right;
8587 }
8588
8589 left.ToRegister();
8590 if (op == Token::ADD) {
8591 __ add(Operand(left.reg()), Immediate(value));
8592 } else if (op == Token::SUB) {
8593 __ sub(Operand(left.reg()), Immediate(value));
8594 } else {
8595 ASSERT(op == Token::MUL);
8596 __ imul(left.reg(), left.reg(), value);
8597 }
8598 } else {
8599 left.ToRegister();
8600 ASSERT(left.is_register());
8601 ASSERT(right.is_register());
8602 if (op == Token::ADD) {
8603 __ add(left.reg(), Operand(right.reg()));
8604 } else if (op == Token::SUB) {
8605 __ sub(left.reg(), Operand(right.reg()));
8606 } else {
8607 ASSERT(op == Token::MUL);
8608 // We have statically verified that a negative zero can be ignored.
8609 __ imul(left.reg(), Operand(right.reg()));
8610 }
8611 }
8612 right.Unuse();
8613 frame_->Push(&left);
8614 if (!node->to_int32()) {
8615 // If ToInt32 is called on the result of ADD, SUB, or MUL, we don't
8616 // care about overflows.
8617 unsafe_bailout_->Branch(overflow);
8618 }
8619 break;
8620 case Token::DIV:
8621 case Token::MOD: {
8622 if (right.is_register() && (right.reg().is(eax) || right.reg().is(edx))) {
8623 if (left.is_register() && left.reg().is(edi)) {
8624 right.ToRegister(ebx);
8625 } else {
8626 right.ToRegister(edi);
8627 }
8628 }
8629 left.ToRegister(eax);
8630 Result edx_reg = allocator_->Allocate(edx);
8631 right.ToRegister();
8632 // The results are unused here because BreakTarget::Branch cannot handle
8633 // live results.
8634 Register right_reg = right.reg();
8635 left.Unuse();
8636 right.Unuse();
8637 edx_reg.Unuse();
8638 __ cmp(right_reg, 0);
8639 // Ensure divisor is positive: no chance of non-int32 or -0 result.
8640 unsafe_bailout_->Branch(less_equal);
8641 __ cdq(); // Sign-extend eax into edx:eax
8642 __ idiv(right_reg);
8643 if (op == Token::MOD) {
8644 // Negative zero can arise from a negative dividend with a zero result.
8645 if (!node->no_negative_zero()) {
8646 Label not_negative_zero;
8647 __ test(edx, Operand(edx));
8648 __ j(not_zero, &not_negative_zero);
8649 __ test(eax, Operand(eax));
8650 unsafe_bailout_->Branch(negative);
8651 __ bind(&not_negative_zero);
8652 }
8653 Result edx_result(edx, TypeInfo::Integer32());
8654 edx_result.set_untagged_int32(true);
8655 frame_->Push(&edx_result);
8656 } else {
8657 ASSERT(op == Token::DIV);
8658 __ test(edx, Operand(edx));
8659 unsafe_bailout_->Branch(not_equal);
8660 Result eax_result(eax, TypeInfo::Integer32());
8661 eax_result.set_untagged_int32(true);
8662 frame_->Push(&eax_result);
8663 }
8664 break;
8665 }
8666 default:
8667 UNREACHABLE();
8668 break;
8669 }
8670}
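
// Why the SHR bailout above is needed, in JS terms: >>> yields an unsigned
// 32-bit result, while untagged int32 values here are signed, e.g.
//   -1 >>> 0   // 4294967295; top bit set, not representable as an Int32
// so a result with the sign bit set must leave safe int32 mode unless the
// consumer immediately forces it back through ToInt32.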
8671
8672
8673void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008674 // According to ECMA-262 section 11.11, page 58, the binary logical
8675 // operators must yield the result of one of the two expressions
8676 // before any ToBoolean() conversions. This means that the value
8677 // produced by a && or || operator is not necessarily a boolean.
8678
8679 // NOTE: If the left hand side produces a materialized value (not
8680 // control flow), we force the right hand side to do the same. This
8681 // is necessary because we assume that if we get control flow on the
8682 // last path out of an expression we got it on all paths.
Steve Block6ded16b2010-05-10 14:33:55 +01008683 if (node->op() == Token::AND) {
8684 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008685 JumpTarget is_true;
8686 ControlDestination dest(&is_true, destination()->false_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00008687 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008688
8689 if (dest.false_was_fall_through()) {
8690 // The current false target was used as the fall-through. If
8691 // there are no dangling jumps to is_true then the left
8692 // subexpression was unconditionally false. Otherwise we have
8693 // paths where we do have to evaluate the right subexpression.
8694 if (is_true.is_linked()) {
8695 // We need to compile the right subexpression. If the jump to
8696 // the current false target was a forward jump then we have a
8697 // valid frame, we have just bound the false target, and we
8698 // have to jump around the code for the right subexpression.
8699 if (has_valid_frame()) {
8700 destination()->false_target()->Unuse();
8701 destination()->false_target()->Jump();
8702 }
8703 is_true.Bind();
8704 // The left subexpression compiled to control flow, so the
8705 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008706 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008707 } else {
8708 // We have actually just jumped to or bound the current false
8709 // target but the current control destination is not marked as
8710 // used.
8711 destination()->Use(false);
8712 }
8713
8714 } else if (dest.is_used()) {
8715 // The left subexpression compiled to control flow (and is_true
8716 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008717 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008718
8719 } else {
8720 // We have a materialized value on the frame, so we exit with
8721 // one on all paths. There are possibly also jumps to is_true
8722 // from nested subexpressions.
8723 JumpTarget pop_and_continue;
8724 JumpTarget exit;
8725
8726 // Avoid popping the result if it converts to 'false' using the
8727 // standard ToBoolean() conversion as described in ECMA-262,
8728 // section 9.2, page 30.
8729 //
8730 // Duplicate the TOS value. The duplicate will be popped by
8731 // ToBoolean.
8732 frame_->Dup();
8733 ControlDestination dest(&pop_and_continue, &exit, true);
8734 ToBoolean(&dest);
8735
8736 // Pop the result of evaluating the first part.
8737 frame_->Drop();
8738
8739 // Compile right side expression.
8740 is_true.Bind();
8741 Load(node->right());
8742
8743 // Exit (always with a materialized value).
8744 exit.Bind();
8745 }
8746
Steve Block6ded16b2010-05-10 14:33:55 +01008747 } else {
8748 ASSERT(node->op() == Token::OR);
8749 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008750 JumpTarget is_false;
8751 ControlDestination dest(destination()->true_target(), &is_false, false);
Steve Blockd0582a62009-12-15 09:54:21 +00008752 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008753
8754 if (dest.true_was_fall_through()) {
8755 // The current true target was used as the fall-through. If
8756 // there are no dangling jumps to is_false then the left
8757 // subexpression was unconditionally true. Otherwise we have
8758 // paths where we do have to evaluate the right subexpression.
8759 if (is_false.is_linked()) {
8760 // We need to compile the right subexpression. If the jump to
8761 // the current true target was a forward jump then we have a
8762 // valid frame, we have just bound the true target, and we
8763 // have to jump around the code for the right subexpression.
8764 if (has_valid_frame()) {
8765 destination()->true_target()->Unuse();
8766 destination()->true_target()->Jump();
8767 }
8768 is_false.Bind();
8769 // The left subexpression compiled to control flow, so the
8770 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008771 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008772 } else {
8773 // We have just jumped to or bound the current true target but
8774 // the current control destination is not marked as used.
8775 destination()->Use(true);
8776 }
8777
8778 } else if (dest.is_used()) {
8779 // The left subexpression compiled to control flow (and is_false
8780 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008781 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008782
8783 } else {
8784 // We have a materialized value on the frame, so we exit with
8785 // one on all paths. There are possibly also jumps to is_false
8786 // from nested subexpressions.
8787 JumpTarget pop_and_continue;
8788 JumpTarget exit;
8789
8790 // Avoid popping the result if it converts to 'true' using the
8791 // standard ToBoolean() conversion as described in ECMA-262,
8792 // section 9.2, page 30.
8793 //
8794 // Duplicate the TOS value. The duplicate will be popped by
8795 // ToBoolean.
8796 frame_->Dup();
8797 ControlDestination dest(&exit, &pop_and_continue, false);
8798 ToBoolean(&dest);
8799
8800 // Pop the result of evaluating the first part.
8801 frame_->Drop();
8802
8803 // Compile right side expression.
8804 is_false.Bind();
8805 Load(node->right());
8806
8807 // Exit (always with a materialized value).
8808 exit.Bind();
8809 }
Steve Block6ded16b2010-05-10 14:33:55 +01008810 }
8811}
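
// The value of && and || is one of the operands, not a boolean (ECMA-262
// section 11.11), e.g. in JS:
//   0 && f()    // 0 (f is never called)
//   "" || "x"   // "x"
// which is why the code above materializes the left value, tests it via
// ToBoolean, and pops it only on the paths that evaluate the right side.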
Steve Blocka7e24c12009-10-30 11:49:00 +00008812
Steve Block6ded16b2010-05-10 14:33:55 +01008813
8814void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
8815 Comment cmnt(masm_, "[ BinaryOperation");
8816
8817 if (node->op() == Token::AND || node->op() == Token::OR) {
8818 GenerateLogicalBooleanOperation(node);
8819 } else if (in_safe_int32_mode()) {
8820 Visit(node->left());
8821 Visit(node->right());
8822 Int32BinaryOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00008823 } else {
8824 // NOTE: The code below assumes that the slow cases (calls to runtime)
8825 // never return a constant/immutable object.
8826 OverwriteMode overwrite_mode = NO_OVERWRITE;
8827 if (node->left()->AsBinaryOperation() != NULL &&
8828 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
8829 overwrite_mode = OVERWRITE_LEFT;
8830 } else if (node->right()->AsBinaryOperation() != NULL &&
8831 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
8832 overwrite_mode = OVERWRITE_RIGHT;
8833 }
8834
Steve Block6ded16b2010-05-10 14:33:55 +01008835 if (node->left()->IsTrivial()) {
8836 Load(node->right());
8837 Result right = frame_->Pop();
8838 frame_->Push(node->left());
8839 frame_->Push(&right);
8840 } else {
8841 Load(node->left());
8842 Load(node->right());
8843 }
8844 GenericBinaryOperation(node, overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00008845 }
8846}
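
// Overwrite modes, sketched: in an expression such as a + b + c the inner
// a + b may allocate a fresh heap number, and since that temporary cannot
// be observed elsewhere the outer addition may reuse its storage instead
// of allocating again. Only the result of a nested BinaryOperation
// qualifies (ResultOverwriteAllowed), never a variable or literal operand.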
8847
8848
8849void CodeGenerator::VisitThisFunction(ThisFunction* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008850 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008851 frame_->PushFunction();
8852}
8853
8854
8855void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008856 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008857 Comment cmnt(masm_, "[ CompareOperation");
8858
Leon Clarkee46be812010-01-19 14:06:41 +00008859 bool left_already_loaded = false;
8860
Steve Blocka7e24c12009-10-30 11:49:00 +00008861 // Get the expressions from the node.
8862 Expression* left = node->left();
8863 Expression* right = node->right();
8864 Token::Value op = node->op();
8865 // To make typeof testing for natives implemented in JavaScript really
8866 // efficient, we generate special code for expressions of the form:
8867 // 'typeof <expression> == <string>'.
8868 UnaryOperation* operation = left->AsUnaryOperation();
8869 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
8870 (operation != NULL && operation->op() == Token::TYPEOF) &&
8871 (right->AsLiteral() != NULL &&
8872 right->AsLiteral()->handle()->IsString())) {
8873 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
8874
8875 // Load the operand and move it to a register.
8876 LoadTypeofExpression(operation->expression());
8877 Result answer = frame_->Pop();
8878 answer.ToRegister();
8879
8880 if (check->Equals(Heap::number_symbol())) {
8881 __ test(answer.reg(), Immediate(kSmiTagMask));
8882 destination()->true_target()->Branch(zero);
8883 frame_->Spill(answer.reg());
8884 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
8885 __ cmp(answer.reg(), Factory::heap_number_map());
8886 answer.Unuse();
8887 destination()->Split(equal);
8888
8889 } else if (check->Equals(Heap::string_symbol())) {
8890 __ test(answer.reg(), Immediate(kSmiTagMask));
8891 destination()->false_target()->Branch(zero);
8892
8893 // It can be an undetectable string object.
8894 Result temp = allocator()->Allocate();
8895 ASSERT(temp.is_valid());
8896 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008897 __ test_b(FieldOperand(temp.reg(), Map::kBitFieldOffset),
8898 1 << Map::kIsUndetectable);
Steve Blocka7e24c12009-10-30 11:49:00 +00008899 destination()->false_target()->Branch(not_zero);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008900 __ CmpInstanceType(temp.reg(), FIRST_NONSTRING_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00008901 temp.Unuse();
8902 answer.Unuse();
Andrei Popescu402d9372010-02-26 13:31:12 +00008903 destination()->Split(below);
Steve Blocka7e24c12009-10-30 11:49:00 +00008904
8905 } else if (check->Equals(Heap::boolean_symbol())) {
8906 __ cmp(answer.reg(), Factory::true_value());
8907 destination()->true_target()->Branch(equal);
8908 __ cmp(answer.reg(), Factory::false_value());
8909 answer.Unuse();
8910 destination()->Split(equal);
8911
8912 } else if (check->Equals(Heap::undefined_symbol())) {
8913 __ cmp(answer.reg(), Factory::undefined_value());
8914 destination()->true_target()->Branch(equal);
8915
8916 __ test(answer.reg(), Immediate(kSmiTagMask));
8917 destination()->false_target()->Branch(zero);
8918
8919 // It can be an undetectable object.
8920 frame_->Spill(answer.reg());
8921 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008922 __ test_b(FieldOperand(answer.reg(), Map::kBitFieldOffset),
8923 1 << Map::kIsUndetectable);
Steve Blocka7e24c12009-10-30 11:49:00 +00008924 answer.Unuse();
8925 destination()->Split(not_zero);
8926
8927 } else if (check->Equals(Heap::function_symbol())) {
8928 __ test(answer.reg(), Immediate(kSmiTagMask));
8929 destination()->false_target()->Branch(zero);
8930 frame_->Spill(answer.reg());
8931 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
Steve Blockd0582a62009-12-15 09:54:21 +00008932 destination()->true_target()->Branch(equal);
8933 // Regular expressions are callable so typeof == 'function'.
8934 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00008935 answer.Unuse();
8936 destination()->Split(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00008937 } else if (check->Equals(Heap::object_symbol())) {
8938 __ test(answer.reg(), Immediate(kSmiTagMask));
8939 destination()->false_target()->Branch(zero);
8940 __ cmp(answer.reg(), Factory::null_value());
8941 destination()->true_target()->Branch(equal);
8942
Steve Blocka7e24c12009-10-30 11:49:00 +00008943 Result map = allocator()->Allocate();
8944 ASSERT(map.is_valid());
Steve Blockd0582a62009-12-15 09:54:21 +00008945 // Regular expressions are typeof == 'function', not 'object'.
8946 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
8947 destination()->false_target()->Branch(equal);
8948
8949 // It can be an undetectable object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008950 __ test_b(FieldOperand(map.reg(), Map::kBitFieldOffset),
8951 1 << Map::kIsUndetectable);
Steve Blocka7e24c12009-10-30 11:49:00 +00008952 destination()->false_target()->Branch(not_zero);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008953 // Do a range test for JSObject type. We can't use
8954 // MacroAssembler::IsInstanceJSObjectType, because we are using a
8955 // ControlDestination, so we copy its implementation here.
Steve Blocka7e24c12009-10-30 11:49:00 +00008956 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01008957 __ sub(Operand(map.reg()), Immediate(FIRST_JS_OBJECT_TYPE));
8958 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE - FIRST_JS_OBJECT_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00008959 answer.Unuse();
8960 map.Unuse();
Leon Clarkef7060e22010-06-03 12:02:55 +01008961 destination()->Split(below_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00008962 } else {
8963 // Uncommon case: typeof testing against a string literal that is
8964 // never returned from the typeof operator.
8965 answer.Unuse();
8966 destination()->Goto(false);
8967 }
8968 return;
Leon Clarkee46be812010-01-19 14:06:41 +00008969 } else if (op == Token::LT &&
8970 right->AsLiteral() != NULL &&
8971 right->AsLiteral()->handle()->IsHeapNumber()) {
8972 Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
8973 if (check->value() == 2147483648.0) { // 0x80000000.
8974 Load(left);
8975 left_already_loaded = true;
8976 Result lhs = frame_->Pop();
8977 lhs.ToRegister();
8978 __ test(lhs.reg(), Immediate(kSmiTagMask));
8979 destination()->true_target()->Branch(zero); // All Smis are less.
8980 Result scratch = allocator()->Allocate();
8981 ASSERT(scratch.is_valid());
8982 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
8983 __ cmp(scratch.reg(), Factory::heap_number_map());
8984 JumpTarget not_a_number;
8985 not_a_number.Branch(not_equal, &lhs);
8986 __ mov(scratch.reg(),
8987 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
8988 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
8989 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf.
8990 const uint32_t borderline_exponent =
8991 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
8992 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
8993 scratch.Unuse();
8994 lhs.Unuse();
8995 destination()->true_target()->Branch(less);
8996 destination()->false_target()->Jump();
8997
8998 not_a_number.Bind(&lhs);
8999 frame_->Push(&lhs);
9000 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009001 }
9002
9003 Condition cc = no_condition;
9004 bool strict = false;
9005 switch (op) {
9006 case Token::EQ_STRICT:
9007 strict = true;
9008 // Fall through
9009 case Token::EQ:
9010 cc = equal;
9011 break;
9012 case Token::LT:
9013 cc = less;
9014 break;
9015 case Token::GT:
9016 cc = greater;
9017 break;
9018 case Token::LTE:
9019 cc = less_equal;
9020 break;
9021 case Token::GTE:
9022 cc = greater_equal;
9023 break;
9024 case Token::IN: {
Leon Clarkee46be812010-01-19 14:06:41 +00009025 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00009026 Load(right);
9027 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
9028 frame_->Push(&answer); // Push the result.
9029 return;
9030 }
9031 case Token::INSTANCEOF: {
Leon Clarkee46be812010-01-19 14:06:41 +00009032 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00009033 Load(right);
9034 InstanceofStub stub;
9035 Result answer = frame_->CallStub(&stub, 2);
9036 answer.ToRegister();
9037 __ test(answer.reg(), Operand(answer.reg()));
9038 answer.Unuse();
9039 destination()->Split(zero);
9040 return;
9041 }
9042 default:
9043 UNREACHABLE();
9044 }
Steve Block6ded16b2010-05-10 14:33:55 +01009045
9046 if (left->IsTrivial()) {
9047 if (!left_already_loaded) {
9048 Load(right);
9049 Result right_result = frame_->Pop();
9050 frame_->Push(left);
9051 frame_->Push(&right_result);
9052 } else {
9053 Load(right);
9054 }
9055 } else {
9056 if (!left_already_loaded) Load(left);
9057 Load(right);
9058 }
Leon Clarkee46be812010-01-19 14:06:41 +00009059 Comparison(node, cc, strict, destination());
Steve Blocka7e24c12009-10-30 11:49:00 +00009060}
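
// The reasoning behind the x < 2147483648.0 fast case above: for a finite
// positive double the high (sign/exponent) word is below
// (kExponentBias + 31) << kExponentShift exactly when the value is below
// 2^31, so one signed compare of that word decides the result. Finite
// negative values have the sign bit set and correctly come out 'less';
// +NaN and +Inf compare above the borderline and correctly yield false.
// Only high words at or above 0xfff00000 (-Inf and negative NaNs, where a
// NaN must still compare false) are sent back to the generic code by the
// unsigned pre-check.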
9061
9062
9063#ifdef DEBUG
9064bool CodeGenerator::HasValidEntryRegisters() {
9065 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
9066 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
9067 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
9068 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
9069 && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
9070}
9071#endif
9072
9073
9074// Emit a LoadIC call to get the value from receiver and leave it in
Andrei Popescu402d9372010-02-26 13:31:12 +00009075// dst.
Steve Blocka7e24c12009-10-30 11:49:00 +00009076class DeferredReferenceGetNamedValue: public DeferredCode {
9077 public:
9078 DeferredReferenceGetNamedValue(Register dst,
9079 Register receiver,
9080 Handle<String> name)
9081 : dst_(dst), receiver_(receiver), name_(name) {
9082 set_comment("[ DeferredReferenceGetNamedValue");
9083 }
9084
9085 virtual void Generate();
9086
9087 Label* patch_site() { return &patch_site_; }
9088
9089 private:
9090 Label patch_site_;
9091 Register dst_;
9092 Register receiver_;
9093 Handle<String> name_;
9094};
9095
9096
9097void DeferredReferenceGetNamedValue::Generate() {
Andrei Popescu402d9372010-02-26 13:31:12 +00009098 if (!receiver_.is(eax)) {
9099 __ mov(eax, receiver_);
9100 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009101 __ Set(ecx, Immediate(name_));
9102 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
9103 __ call(ic, RelocInfo::CODE_TARGET);
9104 // The call must be followed by a test eax instruction to indicate
9105 // that the inobject property case was inlined.
9106 //
9107 // Store the delta to the map check instruction here in the test
9108 // instruction. Use masm_-> instead of the __ macro since the
9109 // latter can't return a value.
9110 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9111 // Here we use masm_-> instead of the __ macro because this is the
9112 // instruction that gets patched and coverage code gets in the way.
9113 masm_->test(eax, Immediate(-delta_to_patch_site));
9114 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
9115
9116 if (!dst_.is(eax)) __ mov(dst_, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009117}
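
// Shape of the patchable inline site this deferred code backs, sketched
// with its initial, deliberately invalid operands:
//   cmp [receiver + HeapObject::kMapOffset], <null_value>  ; patch_site
//   jne <deferred>                              ; fixed-size forward jump
//   mov dst, [receiver + kMaxInt]               ; offset patched later
// and in the deferred path, after the LoadIC call:
//   test eax, -delta   ; marks the site as inlined; delta locates the cmp
// On a hit the IC rewrites the map immediate and the load offset, so
// subsequent loads stay on the inline fast path.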
9118
9119
9120class DeferredReferenceGetKeyedValue: public DeferredCode {
9121 public:
9122 explicit DeferredReferenceGetKeyedValue(Register dst,
9123 Register receiver,
Andrei Popescu402d9372010-02-26 13:31:12 +00009124 Register key)
9125 : dst_(dst), receiver_(receiver), key_(key) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009126 set_comment("[ DeferredReferenceGetKeyedValue");
9127 }
9128
9129 virtual void Generate();
9130
9131 Label* patch_site() { return &patch_site_; }
9132
9133 private:
9134 Label patch_site_;
9135 Register dst_;
9136 Register receiver_;
9137 Register key_;
Steve Blocka7e24c12009-10-30 11:49:00 +00009138};
9139
9140
9141void DeferredReferenceGetKeyedValue::Generate() {
Andrei Popescu402d9372010-02-26 13:31:12 +00009142 if (!receiver_.is(eax)) {
9143 // Register eax is available for key.
9144 if (!key_.is(eax)) {
9145 __ mov(eax, key_);
9146 }
9147 if (!receiver_.is(edx)) {
9148 __ mov(edx, receiver_);
9149 }
9150 } else if (!key_.is(edx)) {
9151 // Register edx is available for receiver.
9152 if (!receiver_.is(edx)) {
9153 __ mov(edx, receiver_);
9154 }
9155 if (!key_.is(eax)) {
9156 __ mov(eax, key_);
9157 }
9158 } else {
9159 __ xchg(edx, eax);
9160 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009161 // Calculate the delta from the IC call instruction to the map check
9162 // cmp instruction in the inlined version. This delta is stored in
9163 // a test(eax, delta) instruction after the call so that we can find
9164 // it in the IC initialization code and patch the cmp instruction.
9165 // This means that we cannot allow test instructions after calls to
9166 // KeyedLoadIC stubs in other places.
9167 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
Andrei Popescu402d9372010-02-26 13:31:12 +00009168 __ call(ic, RelocInfo::CODE_TARGET);
Steve Blocka7e24c12009-10-30 11:49:00 +00009169 // The delta from the start of the map-compare instruction to the
9170 // test instruction. We use masm_-> directly here instead of the __
9171 // macro because the macro sometimes uses macro expansion to turn
9172 // into something that can't return a value. This is encountered
9173 // when doing generated code coverage tests.
9174 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9175 // Here we use masm_-> instead of the __ macro because this is the
9176 // instruction that gets patched and coverage code gets in the way.
9177 masm_->test(eax, Immediate(-delta_to_patch_site));
9178 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
9179
9180 if (!dst_.is(eax)) __ mov(dst_, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009181}
9182
9183
9184class DeferredReferenceSetKeyedValue: public DeferredCode {
9185 public:
9186 DeferredReferenceSetKeyedValue(Register value,
9187 Register key,
Steve Block6ded16b2010-05-10 14:33:55 +01009188 Register receiver,
9189 Register scratch)
9190 : value_(value),
9191 key_(key),
9192 receiver_(receiver),
9193 scratch_(scratch) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009194 set_comment("[ DeferredReferenceSetKeyedValue");
9195 }
9196
9197 virtual void Generate();
9198
9199 Label* patch_site() { return &patch_site_; }
9200
9201 private:
9202 Register value_;
9203 Register key_;
9204 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01009205 Register scratch_;
Steve Blocka7e24c12009-10-30 11:49:00 +00009206 Label patch_site_;
9207};
9208
9209
9210void DeferredReferenceSetKeyedValue::Generate() {
9211 __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
Steve Block6ded16b2010-05-10 14:33:55 +01009212 // Move value_ to eax, key_ to ecx, and receiver_ to edx.
9213 Register old_value = value_;
9214
9215 // First, move value to eax.
9216 if (!value_.is(eax)) {
9217 if (key_.is(eax)) {
9218 // Move key_ out of eax, preferably to ecx.
9219 if (!value_.is(ecx) && !receiver_.is(ecx)) {
9220 __ mov(ecx, key_);
9221 key_ = ecx;
9222 } else {
9223 __ mov(scratch_, key_);
9224 key_ = scratch_;
9225 }
9226 }
9227 if (receiver_.is(eax)) {
9228 // Move receiver_ out of eax, preferably to edx.
9229 if (!value_.is(edx) && !key_.is(edx)) {
9230 __ mov(edx, receiver_);
9231 receiver_ = edx;
9232 } else {
9233 // Both moves to scratch are from eax, and no valid path performs both.
9234 __ mov(scratch_, receiver_);
9235 receiver_ = scratch_;
9236 }
9237 }
9238 __ mov(eax, value_);
9239 value_ = eax;
9240 }
9241
9242 // Now value_ is in eax. Move the other two to the right positions.
9243 // We do not update the variables key_ and receiver_ to ecx and edx.
9244 if (key_.is(ecx)) {
9245 if (!receiver_.is(edx)) {
9246 __ mov(edx, receiver_);
9247 }
9248 } else if (key_.is(edx)) {
9249 if (receiver_.is(ecx)) {
9250 __ xchg(edx, ecx);
9251 } else {
9252 __ mov(ecx, key_);
9253 if (!receiver_.is(edx)) {
9254 __ mov(edx, receiver_);
9255 }
9256 }
9257 } else { // Key is not in edx or ecx.
9258 if (!receiver_.is(edx)) {
9259 __ mov(edx, receiver_);
9260 }
9261 __ mov(ecx, key_);
9262 }
9263
Steve Blocka7e24c12009-10-30 11:49:00 +00009264 // Call the IC stub.
9265 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
9266 __ call(ic, RelocInfo::CODE_TARGET);
9267 // The delta from the start of the map-compare instruction to the
9268 // test instruction. We use masm_-> directly here instead of the
9269 // __ macro because the macro sometimes uses macro expansion to turn
9270 // into something that can't return a value. This is encountered
9271 // when doing generated code coverage tests.
9272 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
9273 // Here we use masm_-> instead of the __ macro because this is the
9274 // instruction that gets patched and coverage code gets in the way.
9275 masm_->test(eax, Immediate(-delta_to_patch_site));
Steve Block6ded16b2010-05-10 14:33:55 +01009276 // Restore value (returned from store IC) register.
9277 if (!old_value.is(eax)) __ mov(old_value, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009278}
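
// The register shuffle above only serves the KeyedStoreIC calling
// convention (value in eax, key in ecx, receiver in edx). The case
// analysis avoids clobbering any register before its value has been moved
// out: scratch_ absorbs whichever of key or receiver must vacate eax when
// neither ecx nor edx is free, and no valid path uses scratch_ twice.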
9279
9280
Andrei Popescu402d9372010-02-26 13:31:12 +00009281Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
9282#ifdef DEBUG
9283 int original_height = frame()->height();
9284#endif
9285 Result result;
9286 // Do not inline the inobject property case for loads from the global
9287 // object. Also do not inline for unoptimized code. This saves time in
9288 // the code generator. Unoptimized code is toplevel code or code that is
9289 // not in a loop.
9290 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
9291 Comment cmnt(masm(), "[ Load from named Property");
9292 frame()->Push(name);
9293
9294 RelocInfo::Mode mode = is_contextual
9295 ? RelocInfo::CODE_TARGET_CONTEXT
9296 : RelocInfo::CODE_TARGET;
9297 result = frame()->CallLoadIC(mode);
9298 // A test eax instruction following the call signals that the inobject
9299 // property case was inlined. Ensure that there is not a test eax
9300 // instruction here.
9301 __ nop();
9302 } else {
9303 // Inline the inobject property case.
9304 Comment cmnt(masm(), "[ Inlined named property load");
9305 Result receiver = frame()->Pop();
9306 receiver.ToRegister();
9307
9308 result = allocator()->Allocate();
9309 ASSERT(result.is_valid());
9310 DeferredReferenceGetNamedValue* deferred =
9311 new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);
9312
9313 // Check that the receiver is a heap object.
9314 __ test(receiver.reg(), Immediate(kSmiTagMask));
9315 deferred->Branch(zero);
9316
9317 __ bind(deferred->patch_site());
9318 // This is the map check instruction that will be patched (so we can't
9319 // use the double underscore macro that may insert instructions).
9320 // Initially use an invalid map to force a failure.
9321 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
9322 Immediate(Factory::null_value()));
9323 // This branch is always a forwards branch so it's always a fixed size
9324 // which allows the assert below to succeed and patching to work.
9325 deferred->Branch(not_equal);
9326
9327 // The delta from the patch label to the load offset must be statically
9328 // known.
9329 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
9330 LoadIC::kOffsetToLoadInstruction);
9331 // The initial (invalid) offset has to be large enough to force a 32-bit
9332 // instruction encoding to allow patching with an arbitrary offset. Use
9333 // kMaxInt (minus kHeapObjectTag).
9334 int offset = kMaxInt;
9335 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
9336
9337 __ IncrementCounter(&Counters::named_load_inline, 1);
9338 deferred->BindExit();
9339 }
9340 ASSERT(frame()->height() == original_height - 1);
9341 return result;
9342}
9343
9344
9345Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
9346#ifdef DEBUG
9347 int expected_height = frame()->height() - (is_contextual ? 1 : 2);
9348#endif
Kristian Monsen50ef84f2010-07-29 15:18:00 +01009349
9350 Result result;
9351 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
9352 result = frame()->CallStoreIC(name, is_contextual);
9353 // A test eax instruction following the call signals that the inobject
9354 // property case was inlined. Ensure that there is not a test eax
9355 // instruction here.
9356 __ nop();
9357 } else {
9358 // Inline the in-object property case.
9359 JumpTarget slow, done;
9360 Label patch_site;
9361
9362 // Get the value and receiver from the stack.
9363 Result value = frame()->Pop();
9364 value.ToRegister();
9365 Result receiver = frame()->Pop();
9366 receiver.ToRegister();
9367
9368 // Allocate result register.
9369 result = allocator()->Allocate();
9370 ASSERT(result.is_valid() && receiver.is_valid() && value.is_valid());
9371
9372 // Check that the receiver is a heap object.
9373 __ test(receiver.reg(), Immediate(kSmiTagMask));
9374 slow.Branch(zero, &value, &receiver);
9375
9376 // This is the map check instruction that will be patched (so we can't
9377 // use the double underscore macro that may insert instructions).
9378 // Initially use an invalid map to force a failure.
9379 __ bind(&patch_site);
9380 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
9381 Immediate(Factory::null_value()));
9382 // This branch is always a forwards branch so it's always a fixed size
9383 // which allows the assert below to succeed and patching to work.
9384 slow.Branch(not_equal, &value, &receiver);
9385
9386 // The delta from the patch label to the store offset must be
9387 // statically known.
9388 ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
9389 StoreIC::kOffsetToStoreInstruction);
9390
9391 // The initial (invalid) offset has to be large enough to force a 32-bit
9392 // instruction encoding to allow patching with an arbitrary offset. Use
9393 // kMaxInt (minus kHeapObjectTag).
9394 int offset = kMaxInt;
9395 __ mov(FieldOperand(receiver.reg(), offset), value.reg());
9396 __ mov(result.reg(), Operand(value.reg()));
9397
9398 // Allocate scratch register for write barrier.
9399 Result scratch = allocator()->Allocate();
9400 ASSERT(scratch.is_valid());
9401
9402 // The write barrier clobbers all input registers, so spill the
9403 // receiver and the value.
9404 frame_->Spill(receiver.reg());
9405 frame_->Spill(value.reg());
9406
9407 // If the receiver and the value share a register allocate a new
9408 // register for the receiver.
9409 if (receiver.reg().is(value.reg())) {
9410 receiver = allocator()->Allocate();
9411 ASSERT(receiver.is_valid());
9412 __ mov(receiver.reg(), Operand(value.reg()));
9413 }
9414
9415 // Update the write barrier. To save instructions in the inlined
9416 // version we do not filter smis.
9417 Label skip_write_barrier;
9418 __ InNewSpace(receiver.reg(), value.reg(), equal, &skip_write_barrier);
9419 int delta_to_record_write = masm_->SizeOfCodeGeneratedSince(&patch_site);
9420 __ lea(scratch.reg(), Operand(receiver.reg(), offset));
9421 __ RecordWriteHelper(receiver.reg(), scratch.reg(), value.reg());
9422 if (FLAG_debug_code) {
9423 __ mov(receiver.reg(), Immediate(BitCast<int32_t>(kZapValue)));
9424 __ mov(value.reg(), Immediate(BitCast<int32_t>(kZapValue)));
9425 __ mov(scratch.reg(), Immediate(BitCast<int32_t>(kZapValue)));
9426 }
9427 __ bind(&skip_write_barrier);
9428 value.Unuse();
9429 scratch.Unuse();
9430 receiver.Unuse();
9431 done.Jump(&result);
9432
9433 slow.Bind(&value, &receiver);
9434 frame()->Push(&receiver);
9435 frame()->Push(&value);
9436 result = frame()->CallStoreIC(name, is_contextual);
9437 // Encode the offset to the map check instruction and the offset
9438 // to the write barrier store address computation in a test eax
9439 // instruction.
9440 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site);
9441 __ test(eax,
9442 Immediate((delta_to_record_write << 16) | delta_to_patch_site));
9443 done.Bind(&result);
9444 }
Andrei Popescu402d9372010-02-26 13:31:12 +00009445
9446 ASSERT_EQ(expected_height, frame()->height());
9447 return result;
9448}
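
// The final test eax marker above packs two distances, both measured from
// the inlined map-check cmp: the low 16 bits give the distance to the
// marker itself, the high 16 bits the distance to the write-barrier
// address computation. With hypothetical deltas of 96 and 40 bytes it
// would assemble as
//   test eax, (40 << 16) | 96
// which the patching machinery decodes to locate both patch points.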
9449
9450
9451Result CodeGenerator::EmitKeyedLoad() {
9452#ifdef DEBUG
9453 int original_height = frame()->height();
9454#endif
9455 Result result;
9456 // Inline array load code if inside of a loop. We do not know the
9457 // receiver map yet, so we initially generate the code with a check
9458 // against an invalid map. In the inline cache code, we patch the map
9459 // check if appropriate.
Leon Clarked91b9f72010-01-27 17:25:45 +00009460 if (loop_nesting() > 0) {
9461 Comment cmnt(masm_, "[ Inlined load from keyed Property");
9462
Leon Clarked91b9f72010-01-27 17:25:45 +00009463 // Use a fresh temporary to load the elements without destroying
9464 // the receiver which is needed for the deferred slow case.
9465 Result elements = allocator()->Allocate();
9466 ASSERT(elements.is_valid());
9467
Leon Clarkef7060e22010-06-03 12:02:55 +01009468 Result key = frame_->Pop();
9469 Result receiver = frame_->Pop();
9470 key.ToRegister();
9471 receiver.ToRegister();
9472
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009473 // If key and receiver are shared registers on the frame, their values will
9474 // be automatically saved and restored when going to deferred code.
9475 // The result is in elements, which is guaranteed non-shared.
Leon Clarked91b9f72010-01-27 17:25:45 +00009476 DeferredReferenceGetKeyedValue* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009477 new DeferredReferenceGetKeyedValue(elements.reg(),
Leon Clarked91b9f72010-01-27 17:25:45 +00009478 receiver.reg(),
Andrei Popescu402d9372010-02-26 13:31:12 +00009479 key.reg());
Leon Clarked91b9f72010-01-27 17:25:45 +00009480
Andrei Popescu402d9372010-02-26 13:31:12 +00009481 __ test(receiver.reg(), Immediate(kSmiTagMask));
9482 deferred->Branch(zero);
Leon Clarked91b9f72010-01-27 17:25:45 +00009483
Leon Clarkef7060e22010-06-03 12:02:55 +01009484 // Check that the receiver has the expected map.
Leon Clarked91b9f72010-01-27 17:25:45 +00009485 // Initially, use an invalid map. The map is patched in the IC
9486 // initialization code.
9487 __ bind(deferred->patch_site());
9488 // Use masm-> here instead of the double underscore macro since extra
9489 // coverage code can interfere with the patching.
9490 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
Steve Block8defd9f2010-07-08 12:39:36 +01009491 Immediate(Factory::null_value()));
Leon Clarked91b9f72010-01-27 17:25:45 +00009492 deferred->Branch(not_equal);
9493
9494 // Check that the key is a smi.
Steve Block6ded16b2010-05-10 14:33:55 +01009495 if (!key.is_smi()) {
9496 __ test(key.reg(), Immediate(kSmiTagMask));
9497 deferred->Branch(not_zero);
9498 } else {
9499 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
9500 }
Leon Clarked91b9f72010-01-27 17:25:45 +00009501
Iain Merrick75681382010-08-19 15:07:18 +01009502 // Get the elements array from the receiver.
Leon Clarked91b9f72010-01-27 17:25:45 +00009503 __ mov(elements.reg(),
9504 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
Iain Merrick75681382010-08-19 15:07:18 +01009505 __ AssertFastElements(elements.reg());
Leon Clarked91b9f72010-01-27 17:25:45 +00009506
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009507 // Check that the key is within bounds.
9508 __ cmp(key.reg(),
Leon Clarked91b9f72010-01-27 17:25:45 +00009509 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
9510 deferred->Branch(above_equal);
9511
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009512 // Load and check that the result is not the hole.
9513 // Key holds a smi.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01009514 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009515 __ mov(elements.reg(),
9516 FieldOperand(elements.reg(),
9517 key.reg(),
9518 times_2,
9519 FixedArray::kHeaderSize));
9520 result = elements;
Andrei Popescu402d9372010-02-26 13:31:12 +00009521 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
Leon Clarked91b9f72010-01-27 17:25:45 +00009522 deferred->Branch(equal);
9523 __ IncrementCounter(&Counters::keyed_load_inline, 1);
9524
9525 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00009526 } else {
9527 Comment cmnt(masm_, "[ Load from keyed Property");
Andrei Popescu402d9372010-02-26 13:31:12 +00009528 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
Leon Clarked91b9f72010-01-27 17:25:45 +00009529 // Make sure that we do not have a test instruction after the
9530 // call. A test instruction after the call is used to
9531 // indicate that we have generated an inline version of the
9532 // keyed load. The explicit nop instruction is here because
9533 // the push that follows might be peep-hole optimized away.
9534 __ nop();
Leon Clarked91b9f72010-01-27 17:25:45 +00009535 }
Andrei Popescu402d9372010-02-26 13:31:12 +00009536 ASSERT(frame()->height() == original_height - 2);
9537 return result;
9538}
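
// Element addressing in the inlined keyed load, worked through under the
// asserted encoding (kSmiTag == 0, kSmiTagSize == 1): a key of value k is
// the raw word 2k, so
//   FieldOperand(elements, key, times_2, FixedArray::kHeaderSize)
// resolves to elements + 2k*2 + kHeaderSize - kHeapObjectTag, i.e. the
// k-th 4-byte slot past the header; e.g. key 3 -> raw 6 -> byte offset 12.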
9539
9540
9541Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
9542#ifdef DEBUG
9543 int original_height = frame()->height();
9544#endif
9545 Result result;
9546 // Generate inlined version of the keyed store if the code is in a loop
9547 // and the key is likely to be a smi.
9548 if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
9549 Comment cmnt(masm(), "[ Inlined store to keyed Property");
9550
9551 // Get the receiver, key and value into registers.
9552 result = frame()->Pop();
9553 Result key = frame()->Pop();
9554 Result receiver = frame()->Pop();
9555
9556 Result tmp = allocator_->Allocate();
9557 ASSERT(tmp.is_valid());
Steve Block6ded16b2010-05-10 14:33:55 +01009558 Result tmp2 = allocator_->Allocate();
9559 ASSERT(tmp2.is_valid());
Andrei Popescu402d9372010-02-26 13:31:12 +00009560
9561 // Determine whether the value is a constant before putting it in a
9562 // register.
9563 bool value_is_constant = result.is_constant();
9564
9565 // Make sure that value, key and receiver are in registers.
9566 result.ToRegister();
9567 key.ToRegister();
9568 receiver.ToRegister();
9569
9570 DeferredReferenceSetKeyedValue* deferred =
9571 new DeferredReferenceSetKeyedValue(result.reg(),
9572 key.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01009573 receiver.reg(),
9574 tmp.reg());
Andrei Popescu402d9372010-02-26 13:31:12 +00009575
9576 // Check that the receiver is not a smi.
9577 __ test(receiver.reg(), Immediate(kSmiTagMask));
9578 deferred->Branch(zero);
9579
Steve Block6ded16b2010-05-10 14:33:55 +01009580 // Check that the key is a smi.
9581 if (!key.is_smi()) {
9582 __ test(key.reg(), Immediate(kSmiTagMask));
9583 deferred->Branch(not_zero);
9584 } else {
9585 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
9586 }
9587
Andrei Popescu402d9372010-02-26 13:31:12 +00009588 // Check that the receiver is a JSArray.
Steve Block6ded16b2010-05-10 14:33:55 +01009589 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
Andrei Popescu402d9372010-02-26 13:31:12 +00009590 deferred->Branch(not_equal);
9591
9592 // Check that the key is within bounds. Both the key and the length of
Steve Block6ded16b2010-05-10 14:33:55 +01009593 // the JSArray are smis. Use unsigned comparison to handle negative keys.
Andrei Popescu402d9372010-02-26 13:31:12 +00009594 __ cmp(key.reg(),
9595 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01009596 deferred->Branch(above_equal);
Andrei Popescu402d9372010-02-26 13:31:12 +00009597
9598 // Get the elements array from the receiver and check that it is not a
9599 // dictionary.
9600 __ mov(tmp.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01009601 FieldOperand(receiver.reg(), JSArray::kElementsOffset));
9602
9603 // Check whether it is possible to omit the write barrier. If the elements
9604 // array is in new space or the value written is a smi, we can safely
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01009605 // update the elements array without a write barrier.
Steve Block6ded16b2010-05-10 14:33:55 +01009606 Label in_new_space;
9607 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
9608 if (!value_is_constant) {
9609 __ test(result.reg(), Immediate(kSmiTagMask));
9610 deferred->Branch(not_zero);
9611 }
9612
9613 __ bind(&in_new_space);
Andrei Popescu402d9372010-02-26 13:31:12 +00009614 // Bind the deferred code patch site to be able to locate the fixed
9615 // array map comparison. When debugging, we patch this comparison to
9616 // always fail so that we will hit the IC call in the deferred code
9617 // which will allow the debugger to break for fast case stores.
9618 __ bind(deferred->patch_site());
9619 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
9620 Immediate(Factory::fixed_array_map()));
9621 deferred->Branch(not_equal);
9622
9623 // Store the value.
Kristian Monsen25f61362010-05-21 11:50:48 +01009624 __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
Andrei Popescu402d9372010-02-26 13:31:12 +00009625 __ IncrementCounter(&Counters::keyed_store_inline, 1);
9626
9627 deferred->BindExit();
9628 } else {
9629 result = frame()->CallKeyedStoreIC();
9630 // Make sure that we do not have a test instruction after the
9631 // call. A test instruction after the call is used to
9632 // indicate that we have generated an inline version of the
9633 // keyed store.
9634 __ nop();
Andrei Popescu402d9372010-02-26 13:31:12 +00009635 }
9636 ASSERT(frame()->height() == original_height - 3);
9637 return result;
Leon Clarked91b9f72010-01-27 17:25:45 +00009638}
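

// A minimal sketch of the bounds check used above; the Sketch* helper is
// hypothetical, not V8 code. Both the key and the JSArray length are tagged
// smis (assuming kSmiTag == 0 and kSmiTagSize == 1, i.e. value << 1), so a
// single *unsigned* comparison rejects out-of-range and negative keys at
// once: a negative key's tagged form has its sign bit set and therefore
// compares above any valid length.
static bool SketchKeyedStoreKeyInBounds(int key, int length) {
  unsigned tagged_key = static_cast<unsigned>(key) << 1;
  unsigned tagged_length = static_cast<unsigned>(length) << 1;
  // Mirrors: cmp key, length; branch if above_equal.
  return tagged_key < tagged_length;
}
// E.g. SketchKeyedStoreKeyInBounds(-1, 4) is false because 0xfffffffe is
// unsigned-above 0x00000008.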


#undef __
#define __ ACCESS_MASM(masm)


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>::cast(raw_name->handle());
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) cgen_->frame()->Dup();
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::TakeValue() {
  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot. For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can
  // reach here. Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space. Set the context to the current context in esi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ mov(edx, Operand(esp, 1 * kPointerSize));

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);

  // Initialize the rest of the function. We don't have to update the
  // write barrier because the allocated object is in new space.
  __ mov(ebx, Immediate(Factory::empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(Factory::the_hole_value()));
  __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
  __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
  __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);

  // Initialize the code pointer in the function to be the one
  // found in the shared function info object.
  __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
  __ mov(FieldOperand(eax, JSFunction::kCodeOffset), edx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(ecx);  // Temporarily remove return address.
  __ pop(edx);
  __ push(esi);
  __ push(edx);
  __ push(ecx);  // Restore return address.
  __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
}
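

// A hedged sketch of the stack shuffle in the gc fallback above, where the
// context is inserted as an extra runtime argument *below* the return
// address. The helper and its int-based stack model are illustrative only,
// not V8 API.
static void SketchInsertArgBelowReturnAddress(int* stack, int* top, int ctx) {
  int ret_addr = stack[--(*top)];  // pop ecx (temporarily remove return addr)
  int arg = stack[--(*top)];       // pop edx (existing argument)
  stack[(*top)++] = ctx;           // push esi (new first argument)
  stack[(*top)++] = arg;           // push edx
  stack[(*top)++] = ret_addr;      // push ecx (restore return address)
}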


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Set up the fixed slots.
  __ xor_(ebx, Operand(ebx));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the surrounding context. We go through the
  // context in the function (ecx) to match the allocation behavior we have
  // in the runtime system (see Heap::AllocateFunctionContext).
  __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
  __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, Factory::undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}
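

// A worked example of the allocation size computed above; the helper and the
// plugged-in numbers are illustrative assumptions, not V8 constants.
static int SketchContextAllocationSize(int slots, int min_context_slots,
                                       int header_size, int pointer_size) {
  int length = slots + min_context_slots;      // slots_ + MIN_CONTEXT_SLOTS
  return length * pointer_size + header_size;  // size given to AllocateInNewSpace
}
// With slots = 2, min_context_slots = 5, header_size = 8 and pointer_size = 4
// (plausible ia32-like values), the stub would allocate (2 + 5) * 4 + 8 = 36
// bytes.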


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  STATIC_ASSERT(kPointerSize == 4);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(ecx, CodeGenerator::FixedArrayElementOperand(ecx, eax));
  __ cmp(ecx, Factory::undefined_value());
  __ j(equal, &slow_case);

  if (FLAG_debug_code) {
    const char* message;
    Handle<Map> expected_map;
    if (mode_ == CLONE_ELEMENTS) {
      message = "Expected (writable) fixed array";
      expected_map = Factory::fixed_array_map();
    } else {
      ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
      message = "Expected copy-on-write fixed array";
      expected_map = Factory::fixed_cow_array_map();
    }
    __ push(ecx);
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
    __ Assert(equal, message);
    __ pop(ecx);
  }

  // Allocate both the JS array and the elements array in one big
  // allocation. This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}
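

// A sketch of the single-block layout produced above (hypothetical helper,
// not V8 API): the JSArray header is followed in the same allocation by the
// elements array, so one limit check covers both objects, and the elements
// pointer is simply the array address plus JSArray::kSize (the lea above).
static int SketchShallowCloneSize(int length, int js_array_size,
                                  int fixed_array_header, int pointer_size) {
  // Mirrors: elements_size = FixedArray::SizeFor(length_) when length_ > 0.
  int elements_size =
      (length > 0) ? fixed_array_header + length * pointer_size : 0;
  return js_array_size + elements_size;  // JSArray::kSize + elements_size
}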


// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label false_result, true_result, not_string;
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // 'null' => false.
  __ cmp(eax, Factory::null_value());
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
            1 << Map::kIsUndetectable);
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ CmpInstanceType(edx, FIRST_JS_OBJECT_TYPE);
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ CmpInstanceType(edx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &not_string);
  STATIC_ASSERT(kSmiTag == 0);
  __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  // HeapNumber => false iff +0, -0, or NaN.
  __ cmp(edx, Factory::heap_number_map());
  __ j(not_equal, &true_result);
  __ fldz();
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ FCmp();
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in eax.
  __ bind(&true_result);
  __ mov(eax, 1);
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ mov(eax, 0);
  __ ret(1 * kPointerSize);
}
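

// A plain C++ model of the decision ladder above for heap objects; the
// helper is an illustration, not the stub's actual interface. Note how the
// number case rejects NaN (NaN != NaN) as well as +0 and -0, matching the
// FCmp against fldz.
static bool SketchHeapObjectToBoolean(bool is_null, bool is_undetectable,
                                      bool is_js_object, bool is_string,
                                      int string_length, bool is_heap_number,
                                      double number_value) {
  if (is_null) return false;
  if (is_undetectable) return false;
  if (is_js_object) return true;
  if (is_string) return string_length != 0;
  if (is_heap_number) {
    return number_value == number_value &&  // false for NaN
           number_value != 0.0;             // false for +0 and -0
  }
  return true;  // Any other heap object is truthy.
}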


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ mov(right_arg, right);
      } else if (right.is(right_arg)) {
        __ mov(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ mov(left_arg, right);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying left argument.
          __ mov(left_arg, left);
          __ mov(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ mov(right_arg, left);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying right argument.
          __ mov(right_arg, right);
          __ mov(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ mov(left_arg, left);
        __ mov(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}
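

// A small worked example (illustrative only, registers modelled as ints) of
// why the order of moves above matters for non-commutative operations when
// one operand already sits in the other operand's target register.
static void SketchMoveOrdering() {
  int eax = 1;  // currently holds left, but eax is the right_arg register
  int ecx = 2;  // currently holds right
  int edx = 0;  // left_arg, free
  // Correct order, as emitted above: save left into left_arg first.
  edx = eax;  // mov left_arg, left
  eax = ecx;  // mov right_arg, right; left survives in edx
  // Executing eax = ecx first would have destroyed left before it could be
  // copied into edx, which is exactly the hazard the branches above avoid.
}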


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(Immediate(right));
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (left.is(left_arg)) {
      __ mov(right_arg, Immediate(right));
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ mov(left_arg, Immediate(right));
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ mov(left_arg, left);
      __ mov(right_arg, Immediate(right));
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(Immediate(left));
    __ push(right);
  } else {
    // The calling convention with registers is left in edx and right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (right.is(right_arg)) {
      __ mov(left_arg, Immediate(left));
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ mov(right_arg, Immediate(left));
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register. Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ mov(right_arg, right);
      __ mov(left_arg, Immediate(left));
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm,
                                         VirtualFrame* frame,
                                         Result* left,
                                         Result* right) {
  if (ArgsInRegistersSupported()) {
    SetArgsInRegisters();
    return frame->CallStub(this, left, right);
  } else {
    frame->Push(left);
    frame->Push(right);
    return frame->CallStub(this, 2);
  }
}


void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division. Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    if (HasArgsInRegisters()) {
      __ mov(ebx, eax);
      __ mov(eax, edx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ mov(right, Operand(esp, 1 * kPointerSize));
    __ mov(left, Operand(esp, 2 * kPointerSize));
  }

  if (static_operands_type_.IsSmi()) {
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      __ or_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_AND) {
      __ and_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_XOR) {
      __ xor_(right, Operand(left));
      GenerateReturn(masm);
      return;
    }
  }

  // 2. Prepare the smi check of both operands by or'ing them together.
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result. Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
  STATIC_ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
  __ test(combined, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smis, not_taken);

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to negative when
      //   smi tagging.
      // These two cases can only happen with shifts by 0 or 1 when
      // handed a valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, slow, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis, not_taken);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      STATIC_ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi so
      // save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis, not_taken);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by idiv
      // instruction.
      STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis, not_taken);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result. Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax.
  GenerateReturn(masm);

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::SHL: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Result we want is in left == edx, so we can put the allocated heap
      // number in eax.
      __ AllocateHeapNumber(eax, ecx, ebx, slow);
      // Store the result in the HeapNumber and return.
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        __ cvtsi2sd(xmm0, Operand(left));
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
      } else {
        // It's OK to overwrite the right argument on the stack because we
        // are about to return.
        __ mov(Operand(esp, 1 * kPointerSize), left);
        __ fild_s(Operand(esp, 1 * kPointerSize));
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
      }
      GenerateReturn(masm);
      break;
    }

    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Restore arguments to edx, eax.
      switch (op_) {
        case Token::ADD:
          // Revert right = right + left.
          __ sub(right, Operand(left));
          break;
        case Token::SUB:
          // Revert left = left - right.
          __ add(left, Operand(right));
          break;
        case Token::MUL:
          // Right was clobbered but a copy is in ebx.
          __ mov(right, ebx);
          break;
        case Token::DIV:
          // Left was clobbered but a copy is in edi. Right is in ebx for
          // division.
          __ mov(edx, edi);
          __ mov(eax, right);
          break;
        default: UNREACHABLE();
          break;
      }
      __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Smis(masm, ebx);
        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::LoadFloatSmis(masm, ebx);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
      }
      __ mov(eax, ecx);
      GenerateReturn(masm);
      break;
    }

    default:
      break;
  }

  // 7. Non-smi operands, fall out to the non-smi code with the operands in
  // edx and eax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  switch (op_) {
    case Token::BIT_OR:
    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Right operand is saved in ecx and eax was destroyed by the smi
      // check.
      __ mov(eax, ecx);
      break;

    case Token::DIV:
    case Token::MOD:
      // Operands are in eax, ebx at this point.
      __ mov(edx, eax);
      __ mov(eax, ebx);
      break;

    default:
      break;
  }
}
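

// A minimal sketch of the combined smi check from steps 2 and 3 above
// (hypothetical helpers; kSmiTag == 0 and kSmiTagMask == 1 are assumed, as
// the STATIC_ASSERT documents). Because every smi has a clear low bit,
// or'ing the operands lets a single test instruction verify both at once.
static bool SketchBothAreSmis(unsigned left, unsigned right) {
  return ((left | right) & 1u) == 0;  // tag bit set => at least one non-smi
}
// The SHR case's range test works the same way in C++: an untagged unsigned
// result fits in a 31-bit smi only if neither of the two top bits is set.
static bool SketchUnsignedFitsInSmi(unsigned untagged) {
  return (untagged & 0xc0000000u) == 0;
}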


void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
  Label call_runtime;

  __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);

  // Generate fast case smi code if requested. This flag is set when the fast
  // case smi code is not generated by the caller. Generating it here will
  // speed up common operations.
  if (ShouldGenerateSmiCode()) {
    GenerateSmiCode(masm, &call_runtime);
  } else if (op_ != Token::MOD) {  // MOD goes straight to runtime.
    if (!HasArgsInRegisters()) {
      GenerateLoadArguments(masm);
    }
  }

  // Floating point case.
  if (ShouldGenerateFPCode()) {
    switch (op_) {
      case Token::ADD:
      case Token::SUB:
      case Token::MUL:
      case Token::DIV: {
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-smi argument
          // occurs (and only if smi code is generated). This is the right
          // moment to patch to HEAP_NUMBERS state. The transition is
          // attempted only for the four basic operations. The stub stays in
          // the DEFAULT state forever for all other operations (also if smi
          // code is skipped).
          GenerateTypeTransition(masm);
          break;
        }

        Label not_floats;
        if (CpuFeatures::IsSupported(SSE2)) {
          CpuFeatures::Scope use_sse2(SSE2);
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
            if (static_operands_type_.IsSmi()) {
              if (FLAG_debug_code) {
                __ AbortIfNotSmi(edx);
                __ AbortIfNotSmi(eax);
              }
              FloatingPointHelper::LoadSSE2Smis(masm, ecx);
            } else {
              FloatingPointHelper::LoadSSE2Operands(masm);
            }
          } else {
            FloatingPointHelper::LoadSSE2Operands(masm, &call_runtime);
          }

          switch (op_) {
            case Token::ADD: __ addsd(xmm0, xmm1); break;
            case Token::SUB: __ subsd(xmm0, xmm1); break;
            case Token::MUL: __ mulsd(xmm0, xmm1); break;
            case Token::DIV: __ divsd(xmm0, xmm1); break;
            default: UNREACHABLE();
          }
          GenerateHeapResultAllocation(masm, &call_runtime);
          __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          GenerateReturn(masm);
        } else {  // SSE2 not available, use FPU.
          if (static_operands_type_.IsNumber()) {
            if (FLAG_debug_code) {
              // Assert at runtime that inputs are only numbers.
              __ AbortIfNotNumber(edx);
              __ AbortIfNotNumber(eax);
            }
          } else {
            FloatingPointHelper::CheckFloatOperands(masm, &call_runtime, ebx);
          }
          FloatingPointHelper::LoadFloatOperands(
              masm,
              ecx,
              FloatingPointHelper::ARGS_IN_REGISTERS);
          switch (op_) {
            case Token::ADD: __ faddp(1); break;
            case Token::SUB: __ fsubp(1); break;
            case Token::MUL: __ fmulp(1); break;
            case Token::DIV: __ fdivp(1); break;
            default: UNREACHABLE();
          }
          Label after_alloc_failure;
          GenerateHeapResultAllocation(masm, &after_alloc_failure);
          __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          GenerateReturn(masm);
          __ bind(&after_alloc_failure);
          __ ffree();
          __ jmp(&call_runtime);
        }
        __ bind(&not_floats);
        if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
            !HasSmiCodeInStub()) {
          // Execution reaches this point when the first non-number argument
          // occurs (and only if smi code is skipped from the stub, otherwise
          // the patching has already been done earlier in this case branch).
          // Try patching to STRINGS for ADD operation.
          if (op_ == Token::ADD) {
            GenerateTypeTransition(masm);
          }
        }
        break;
      }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label non_smi_result;
        FloatingPointHelper::LoadAsIntegers(masm,
                                            static_operands_type_,
                                            use_sse3_,
                                            &call_runtime);
        switch (op_) {
          case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
          case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
          case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
          case Token::SAR: __ sar_cl(eax); break;
          case Token::SHL: __ shl_cl(eax); break;
          case Token::SHR: __ shr_cl(eax); break;
          default: UNREACHABLE();
        }
        if (op_ == Token::SHR) {
          // Check if result is non-negative and fits in a smi.
          __ test(eax, Immediate(0xc0000000));
          __ j(not_zero, &call_runtime);
        } else {
          // Check if result fits in a smi.
          __ cmp(eax, 0xc0000000);
          __ j(negative, &non_smi_result);
        }
        // Tag smi result and return.
        __ SmiTag(eax);
        GenerateReturn(masm);

        // All ops except SHR return a signed int32 that we load in
        // a HeapNumber.
        if (op_ != Token::SHR) {
          __ bind(&non_smi_result);
          // Allocate a heap number if needed.
          __ mov(ebx, Operand(eax));  // ebx: result
          Label skip_allocation;
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                  1 * kPointerSize : 2 * kPointerSize));
              __ test(eax, Immediate(kSmiTagMask));
              __ j(not_zero, &skip_allocation, not_taken);
              // Fall through!
            case NO_OVERWRITE:
              __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(ebx));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), ebx);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
          GenerateReturn(masm);
        }
        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If the arguments were passed in registers, place them now on
  // the stack in the correct order below the return address.
  __ bind(&call_runtime);
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Test for string arguments before calling runtime.
      Label not_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in edx and eax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      // Registers containing left and right operands respectively.
      Register lhs, rhs;
      if (HasArgsReversed()) {
        lhs = eax;
        rhs = edx;
      } else {
        lhs = edx;
        rhs = eax;
      }

      // Test if first argument is a string.
      __ test(lhs, Immediate(kSmiTagMask));
      __ j(zero, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_string1);

      // First argument is a string, test second.
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &string1);

      // First and second argument are strings. Jump to the string add stub.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to lookup the number
      // string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, edi, ebx, ecx, true, &string1);

      // Replace second argument on stack and tailcall string add stub to make
      // the result.
      __ mov(Operand(esp, 1 * kPointerSize), edi);
      __ TailCallStub(&string_add_stub);

      // Only first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test second.
      __ bind(&not_string1);
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_strings);

      // Only second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
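

// A compact model of the ADD fallback dispatch above (illustrative enum and
// helper, not V8 API): two strings go straight to the string add stub, a
// string plus a smi first consults the number-string cache, a single string
// operand uses the STRING_ADD_LEFT/RIGHT builtins, and everything else falls
// back to the generic ADD builtin.
enum SketchAddPath {
  kSketchStringAddStub,
  kSketchNumberStringCacheThenStringAdd,
  kSketchStringAddLeft,
  kSketchStringAddRight,
  kSketchGenericAdd
};
static SketchAddPath SketchClassifyAdd(bool lhs_is_string, bool rhs_is_string,
                                       bool rhs_is_smi) {
  if (lhs_is_string && rhs_is_string) return kSketchStringAddStub;
  if (lhs_is_string && rhs_is_smi) return kSketchNumberStringCacheThenStringAdd;
  if (lhs_is_string) return kSketchStringAddLeft;
  if (rhs_is_string) return kSketchStringAddRight;
  return kSketchGenericAdd;
}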


void GenericBinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                       Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  if (HasArgsReversed()) {
    if (mode == OVERWRITE_RIGHT) {
      mode = OVERWRITE_LEFT;
    } else if (mode == OVERWRITE_LEFT) {
      mode = OVERWRITE_RIGHT;
    }
  }
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use object in edx as a result holder.
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten losing one of the arguments as we are
      // now done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}
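

// A sketch of the mode swap at the top of the function above (hypothetical
// helper): when the stub's arguments were reversed, a request to overwrite
// the left operand really targets what is now the right one, and vice versa;
// NO_OVERWRITE is unaffected.
enum SketchOverwriteMode { kSketchNoOverwrite, kSketchLeft, kSketchRight };
static SketchOverwriteMode SketchEffectiveMode(SketchOverwriteMode mode,
                                               bool args_reversed) {
  if (!args_reversed) return mode;
  if (mode == kSketchRight) return kSketchLeft;
  if (mode == kSketchLeft) return kSketchRight;
  return mode;
}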
10830
10831
Steve Block3ce2e202009-11-05 08:53:23 +000010832void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
10833 // If arguments are not passed in registers read them from the stack.
Steve Block6ded16b2010-05-10 14:33:55 +010010834 ASSERT(!HasArgsInRegisters());
10835 __ mov(eax, Operand(esp, 1 * kPointerSize));
10836 __ mov(edx, Operand(esp, 2 * kPointerSize));
Steve Block3ce2e202009-11-05 08:53:23 +000010837}
Steve Blocka7e24c12009-10-30 11:49:00 +000010838
Steve Block3ce2e202009-11-05 08:53:23 +000010839
10840void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
10841 // If arguments are not passed in registers remove them from the stack before
10842 // returning.
Leon Clarked91b9f72010-01-27 17:25:45 +000010843 if (!HasArgsInRegisters()) {
Steve Block3ce2e202009-11-05 08:53:23 +000010844 __ ret(2 * kPointerSize); // Remove both operands
10845 } else {
10846 __ ret(0);
10847 }
Steve Blocka7e24c12009-10-30 11:49:00 +000010848}
10849
10850
Steve Block6ded16b2010-05-10 14:33:55 +010010851void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
10852 ASSERT(HasArgsInRegisters());
10853 __ pop(ecx);
10854 if (HasArgsReversed()) {
10855 __ push(eax);
10856 __ push(edx);
10857 } else {
10858 __ push(edx);
10859 __ push(eax);
10860 }
10861 __ push(ecx);
10862}
10863
10864
10865void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
Leon Clarkeac952652010-07-15 11:15:24 +010010866 // Ensure the operands are on the stack.
Steve Block6ded16b2010-05-10 14:33:55 +010010867 if (HasArgsInRegisters()) {
10868 GenerateRegisterArgsPush(masm);
Steve Block6ded16b2010-05-10 14:33:55 +010010869 }
10870
Leon Clarkeac952652010-07-15 11:15:24 +010010871 __ pop(ecx); // Save return address.
Steve Block6ded16b2010-05-10 14:33:55 +010010872
Steve Block6ded16b2010-05-10 14:33:55 +010010873 // Left and right arguments are now on top.
Steve Block6ded16b2010-05-10 14:33:55 +010010874 // Push this stub's key. Although the operation and the type info are
10875 // encoded into the key, the encoding is opaque, so push them too.
10876 __ push(Immediate(Smi::FromInt(MinorKey())));
10877 __ push(Immediate(Smi::FromInt(op_)));
10878 __ push(Immediate(Smi::FromInt(runtime_operands_type_)));
10879
Leon Clarkeac952652010-07-15 11:15:24 +010010880 __ push(ecx); // Push return address.
Steve Block6ded16b2010-05-10 14:33:55 +010010881
Leon Clarkeac952652010-07-15 11:15:24 +010010882 // Patch the caller to an appropriate specialized stub and return the
10883 // operation result to the caller of the stub.
Steve Block6ded16b2010-05-10 14:33:55 +010010884 __ TailCallExternalReference(
10885 ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
Leon Clarkeac952652010-07-15 11:15:24 +010010886 5,
Steve Block6ded16b2010-05-10 14:33:55 +010010887 1);
Steve Block6ded16b2010-05-10 14:33:55 +010010888}
10889
10890
10891Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
10892 GenericBinaryOpStub stub(key, type_info);
10893 return stub.GetCode();
10894}
10895
10896
Andrei Popescu402d9372010-02-26 13:31:12 +000010897void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
10898 // Input on stack:
10899 // esp[4]: argument (should be number).
10900 // esp[0]: return address.
10901 // Test that eax is a number.
10902 Label runtime_call;
10903 Label runtime_call_clear_stack;
10904 Label input_not_smi;
10905 Label loaded;
10906 __ mov(eax, Operand(esp, kPointerSize));
10907 __ test(eax, Immediate(kSmiTagMask));
10908 __ j(not_zero, &input_not_smi);
10909 // Input is a smi. Untag and load it onto the FPU stack.
10910 // Then load the low and high words of the double into ebx, edx.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010010911 STATIC_ASSERT(kSmiTagSize == 1);
Andrei Popescu402d9372010-02-26 13:31:12 +000010912 __ sar(eax, 1);
10913 __ sub(Operand(esp), Immediate(2 * kPointerSize));
10914 __ mov(Operand(esp, 0), eax);
10915 __ fild_s(Operand(esp, 0));
10916 __ fst_d(Operand(esp, 0));
10917 __ pop(edx);
10918 __ pop(ebx);
10919 __ jmp(&loaded);
10920 __ bind(&input_not_smi);
10921 // Check if input is a HeapNumber.
10922 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
10923 __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
10924 __ j(not_equal, &runtime_call);
10925 // Input is a HeapNumber. Push it on the FPU stack and load its
10926 // low and high words into ebx, edx.
10927 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
10928 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
10929 __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));
10930
10931 __ bind(&loaded);
10932 // ST[0] == double value
10933 // ebx = low 32 bits of double value
10934 // edx = high 32 bits of double value
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010010935 // Compute hash (the shifts are arithmetic):
Andrei Popescu402d9372010-02-26 13:31:12 +000010936 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
10937 __ mov(ecx, ebx);
10938 __ xor_(ecx, Operand(edx));
10939 __ mov(eax, ecx);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010010940 __ sar(eax, 16);
Andrei Popescu402d9372010-02-26 13:31:12 +000010941 __ xor_(ecx, Operand(eax));
10942 __ mov(eax, ecx);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010010943 __ sar(eax, 8);
Andrei Popescu402d9372010-02-26 13:31:12 +000010944 __ xor_(ecx, Operand(eax));
10945 ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
10946 __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010010947
Andrei Popescu402d9372010-02-26 13:31:12 +000010948 // ST[0] == double value.
10949 // ebx = low 32 bits of double value.
10950 // edx = high 32 bits of double value.
10951 // ecx = TranscendentalCache::hash(double value).
10952 __ mov(eax,
10953 Immediate(ExternalReference::transcendental_cache_array_address()));
10954 // Eax points to cache array.
10955 __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
10956 // Eax points to the cache for the type type_.
10957 // If NULL, the cache hasn't been initialized yet, so go through runtime.
10958 __ test(eax, Operand(eax));
10959 __ j(zero, &runtime_call_clear_stack);
10960#ifdef DEBUG
10961 // Check that the layout of cache elements match expectations.
Steve Block6ded16b2010-05-10 14:33:55 +010010962 { TranscendentalCache::Element test_elem[2];
Andrei Popescu402d9372010-02-26 13:31:12 +000010963 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
10964 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
10965 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
10966 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
10967 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
10968 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer.
10969 CHECK_EQ(0, elem_in0 - elem_start);
10970 CHECK_EQ(kIntSize, elem_in1 - elem_start);
10971 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
10972 }
10973#endif
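// For reference, the layout verified above corresponds to an element of
// this shape (a sketch; the authoritative definition is
// TranscendentalCache::Element):
//
//   struct Element {
//     uint32_t in[2];   // low and high word of the input double
//     Object* output;   // cached result
//   };                  // 2 * kIntSize + kPointerSize == 12 bytes on ia32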
10974 // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
10975 __ lea(ecx, Operand(ecx, ecx, times_2, 0));
10976 __ lea(ecx, Operand(eax, ecx, times_4, 0));
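// The two lea instructions above avoid a multiply: the first computes
// ecx = ecx + ecx * 2 (= index * 3), the second ecx = eax + ecx * 4
// (= cache_base + index * 12, the 12-byte entry stride checked above).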
10977 // Check if cache matches: Double value is stored in uint32_t[2] array.
10978 Label cache_miss;
10979 __ cmp(ebx, Operand(ecx, 0));
10980 __ j(not_equal, &cache_miss);
10981 __ cmp(edx, Operand(ecx, kIntSize));
10982 __ j(not_equal, &cache_miss);
10983 // Cache hit!
10984 __ mov(eax, Operand(ecx, 2 * kIntSize));
10985 __ fstp(0);
10986 __ ret(kPointerSize);
10987
10988 __ bind(&cache_miss);
10989 // Update cache with new value.
10990 // We are short on registers, so use no_reg as scratch.
10991 // This gives slightly larger code.
10992 __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
10993 GenerateOperation(masm);
10994 __ mov(Operand(ecx, 0), ebx);
10995 __ mov(Operand(ecx, kIntSize), edx);
10996 __ mov(Operand(ecx, 2 * kIntSize), eax);
10997 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
10998 __ ret(kPointerSize);
10999
11000 __ bind(&runtime_call_clear_stack);
11001 __ fstp(0);
11002 __ bind(&runtime_call);
Steve Block6ded16b2010-05-10 14:33:55 +010011003 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
Andrei Popescu402d9372010-02-26 13:31:12 +000011004}
11005
11006
11007Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
11008 switch (type_) {
11009 // Add more cases when necessary.
11010 case TranscendentalCache::SIN: return Runtime::kMath_sin;
11011 case TranscendentalCache::COS: return Runtime::kMath_cos;
11012 default:
11013 UNIMPLEMENTED();
11014 return Runtime::kAbort;
11015 }
11016}
11017
11018
11019void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
11020 // Only free register is edi.
11021 Label done;
11022 ASSERT(type_ == TranscendentalCache::SIN ||
11023 type_ == TranscendentalCache::COS);
11024 // More transcendental types can be added later.
11025
11026 // Both fsin and fcos require arguments in the range +/-2^63 and
11027 // return NaN for infinities and NaN. They can share all code except
11028 // the actual fsin/fcos operation.
11029 Label in_range;
11030 // If argument is outside the range -2^63..2^63, fsin/cos doesn't
11031 // work. We must reduce it to the appropriate range.
11032 __ mov(edi, edx);
11033 __ and_(Operand(edi), Immediate(0x7ff00000)); // Exponent only.
11034 int supported_exponent_limit =
11035 (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
11036 __ cmp(Operand(edi), Immediate(supported_exponent_limit));
11037 __ j(below, &in_range, taken);
11038 // Check for infinity and NaN. Both return NaN for sin.
11039 __ cmp(Operand(edi), Immediate(0x7ff00000));
11040 Label non_nan_result;
11041 __ j(not_equal, &non_nan_result, taken);
11042 // Input is +/-Infinity or NaN. Result is NaN.
11043 __ fstp(0);
11044 // NaN is represented by 0x7ff8000000000000.
11045 __ push(Immediate(0x7ff80000));
11046 __ push(Immediate(0));
11047 __ fld_d(Operand(esp, 0));
11048 __ add(Operand(esp), Immediate(2 * kPointerSize));
11049 __ jmp(&done);
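// (0x7ff8000000000000 is the canonical quiet NaN: all eleven exponent
// bits and the top mantissa bit set. The two pushes above build exactly
// that bit pattern on the stack before reloading it as a double.)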
11050
11051 __ bind(&non_nan_result);
11052
11053 // Use fprem1 to restrict the argument to the range +/-2*PI.
11054 __ mov(edi, eax); // Save eax before using fnstsw_ax.
11055 __ fldpi();
11056 __ fadd(0);
11057 __ fld(1);
11058 // FPU Stack: input, 2*pi, input.
11059 {
11060 Label no_exceptions;
11061 __ fwait();
11062 __ fnstsw_ax();
11063 // Clear if Illegal Operand or Zero Division exceptions are set.
11064 __ test(Operand(eax), Immediate(5));
11065 __ j(zero, &no_exceptions);
11066 __ fnclex();
11067 __ bind(&no_exceptions);
11068 }
11069
11070 // Compute st(0) % st(1)
11071 {
11072 Label partial_remainder_loop;
11073 __ bind(&partial_remainder_loop);
11074 __ fprem1();
11075 __ fwait();
11076 __ fnstsw_ax();
11077 __ test(Operand(eax), Immediate(0x400 /* C2 */));
11078 // If C2 is set, computation only has partial result. Loop to
11079 // continue computation.
11080 __ j(not_zero, &partial_remainder_loop);
11081 }
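// Sketch of what the loop above computes (not emitted code): fprem1
// produces the IEEE partial remainder, so the net effect is roughly
//
//   double reduced = remainder(input, 2.0 * M_PI);  // <math.h>
//
// with the loop repeating while C2 is set, i.e. while fprem1 has only
// delivered a partial reduction because the exponents are far apart.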
11082 // FPU Stack: input, 2*pi, input % 2*pi
11083 __ fstp(2);
11084 __ fstp(0);
11085 __ mov(eax, edi); // Restore eax (allocated HeapNumber pointer).
11086
11087 // FPU Stack: input % 2*pi
11088 __ bind(&in_range);
11089 switch (type_) {
11090 case TranscendentalCache::SIN:
11091 __ fsin();
11092 break;
11093 case TranscendentalCache::COS:
11094 __ fcos();
11095 break;
11096 default:
11097 UNREACHABLE();
11098 }
11099 __ bind(&done);
11100}
11101
11102
Leon Clarkee46be812010-01-19 14:06:41 +000011103// Get the integer part of a heap number. Surprisingly, all this bit twiddling
11104// is faster than using the built-in instructions on floating point registers.
11105// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
11106// trashed registers.
11107void IntegerConvert(MacroAssembler* masm,
11108 Register source,
Steve Block6ded16b2010-05-10 14:33:55 +010011109 TypeInfo type_info,
Leon Clarkee46be812010-01-19 14:06:41 +000011110 bool use_sse3,
11111 Label* conversion_failure) {
Leon Clarked91b9f72010-01-27 17:25:45 +000011112 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
Leon Clarkee46be812010-01-19 14:06:41 +000011113 Label done, right_exponent, normal_exponent;
11114 Register scratch = ebx;
11115 Register scratch2 = edi;
Kristian Monsen25f61362010-05-21 11:50:48 +010011116 if (type_info.IsInteger32() && CpuFeatures::IsEnabled(SSE2)) {
11117 CpuFeatures::Scope scope(SSE2);
11118 __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
11119 return;
11120 }
Steve Block6ded16b2010-05-10 14:33:55 +010011121 if (!type_info.IsInteger32() || !use_sse3) {
11122 // Get exponent word.
11123 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
11124 // Get exponent alone in scratch2.
11125 __ mov(scratch2, scratch);
11126 __ and_(scratch2, HeapNumber::kExponentMask);
11127 }
Leon Clarkee46be812010-01-19 14:06:41 +000011128 if (use_sse3) {
11129 CpuFeatures::Scope scope(SSE3);
Steve Block6ded16b2010-05-10 14:33:55 +010011130 if (!type_info.IsInteger32()) {
11131 // Check whether the exponent is too big for a 64 bit signed integer.
11132 static const uint32_t kTooBigExponent =
11133 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
11134 __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
11135 __ j(greater_equal, conversion_failure);
11136 }
Leon Clarkee46be812010-01-19 14:06:41 +000011137 // Load x87 register with heap number.
11138 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
11139 // Reserve space for 64 bit answer.
11140 __ sub(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
11141 // Do conversion, which cannot fail because we checked the exponent.
11142 __ fisttp_d(Operand(esp, 0));
11143 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx.
11144 __ add(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
11145 } else {
11146 // Load ecx with zero. We use this either for the final shift or
11147 // for the answer.
11148 __ xor_(ecx, Operand(ecx));
11149 // Check whether the exponent matches a 32 bit signed int that cannot be
11150 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
11151 // exponent is 30 (biased). This is the exponent that we are fastest at and
11152 // also the highest exponent we can handle here.
11153 const uint32_t non_smi_exponent =
11154 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
11155 __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
11156 // If we have a match of the int32-but-not-Smi exponent then skip some
11157 // logic.
11158 __ j(equal, &right_exponent);
11159 // If the exponent is higher than that then go to slow case. This catches
11160 // numbers that don't fit in a signed int32, infinities and NaNs.
11161 __ j(less, &normal_exponent);
11162
11163 {
11164 // Handle a big exponent. The only reason we have this code is that the
11165 // >>> operator has a tendency to generate numbers with an exponent of 31.
11166 const uint32_t big_non_smi_exponent =
11167 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
11168 __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
11169 __ j(not_equal, conversion_failure);
11170 // We have the big exponent, typically from >>>. This means the number is
11171 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
11172 __ mov(scratch2, scratch);
11173 __ and_(scratch2, HeapNumber::kMantissaMask);
11174 // Put back the implicit 1.
11175 __ or_(scratch2, 1 << HeapNumber::kExponentShift);
11176 // Shift up the mantissa bits to take up the space the exponent used to
11177 // take. We just or'ed in the implicit bit, so that took care of one, and
11178 // we want to use the full unsigned range, so we subtract 1 bit from the
11179 // shift distance.
11180 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
11181 __ shl(scratch2, big_shift_distance);
11182 // Get the second half of the double.
11183 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
11184 // Shift down 21 bits to get the most significant 11 bits of the low
11185 // mantissa word.
11186 __ shr(ecx, 32 - big_shift_distance);
11187 __ or_(ecx, Operand(scratch2));
11188 // We have the answer in ecx, but we may need to negate it.
11189 __ test(scratch, Operand(scratch));
11190 __ j(positive, &done);
11191 __ neg(ecx);
11192 __ jmp(&done);
11193 }
11194
11195 __ bind(&normal_exponent);
11196 // Exponent word in scratch, exponent part of exponent word in scratch2.
11197 // Zero in ecx.
11198 // We know the exponent is smaller than 30 (biased). If it is less than
11199 // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
11200 // it rounds to zero.
11201 const uint32_t zero_exponent =
11202 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
11203 __ sub(Operand(scratch2), Immediate(zero_exponent));
11204 // ecx already has a Smi zero.
11205 __ j(less, &done);
11206
11207 // We have a shifted exponent between 0 and 30 in scratch2.
11208 __ shr(scratch2, HeapNumber::kExponentShift);
11209 __ mov(ecx, Immediate(30));
11210 __ sub(ecx, Operand(scratch2));
11211
11212 __ bind(&right_exponent);
11213 // Here ecx is the shift, scratch is the exponent word.
11214 // Get the top bits of the mantissa.
11215 __ and_(scratch, HeapNumber::kMantissaMask);
11216 // Put back the implicit 1.
11217 __ or_(scratch, 1 << HeapNumber::kExponentShift);
11218 // Shift up the mantissa bits to take up the space the exponent used to
11219 // take. We have kExponentShift + 1 significant bits in the low end of the
11220 // word. Shift them to the top bits.
11221 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
11222 __ shl(scratch, shift_distance);
11223 // Get the second half of the double. For some exponents we don't
11224 // actually need this because the bits get shifted out again, but
11225 // it's probably slower to test than just to do it.
11226 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
11227 // Shift down 22 bits to get the most significant 10 bits of the low
11228 // mantissa word.
11229 __ shr(scratch2, 32 - shift_distance);
11230 __ or_(scratch2, Operand(scratch));
11231 // Move down according to the exponent.
11232 __ shr_cl(scratch2);
11233 // Now the unsigned answer is in scratch2. We need to move it to ecx and
11234 // we may need to fix the sign.
11235 Label negative;
11236 __ xor_(ecx, Operand(ecx));
11237 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
11238 __ j(greater, &negative);
11239 __ mov(ecx, scratch2);
11240 __ jmp(&done);
11241 __ bind(&negative);
11242 __ sub(ecx, Operand(scratch2));
11243 __ bind(&done);
11244 }
11245}
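// Illustrative scalar equivalent of the non-SSE3 path above (a sketch,
// not V8 code), truncating a double to int32 from its raw bit pattern:
//
//   int32_t DoubleToInt32Sketch(uint32_t lo, uint32_t hi) {
//     int exponent = ((hi >> 20) & 0x7ff) - 1023;  // unbias the exponent
//     if (exponent < 0) return 0;    // |d| < 1.0 truncates to zero
//     if (exponent > 31) return 0;   // the stub bails out to runtime here
//     uint64_t mantissa =            // 52 stored bits plus the implicit 1
//         (((uint64_t)(hi & 0xfffff) << 32) | lo) | (1ULL << 52);
//     uint32_t bits = (uint32_t)(mantissa >> (52 - exponent));
//     return (hi & 0x80000000) ? -(int32_t)bits : (int32_t)bits;
//   }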
11246
11247
11248// Input: edx, eax are the left and right objects of a bit op.
11249// Output: eax, ecx are left and right integers for a bit op.
Steve Block6ded16b2010-05-10 14:33:55 +010011250void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm,
11251 TypeInfo type_info,
11252 bool use_sse3,
11253 Label* conversion_failure) {
Leon Clarkee46be812010-01-19 14:06:41 +000011254 // Check float operands.
11255 Label arg1_is_object, check_undefined_arg1;
11256 Label arg2_is_object, check_undefined_arg2;
11257 Label load_arg2, done;
11258
Steve Block6ded16b2010-05-10 14:33:55 +010011259 if (!type_info.IsDouble()) {
11260 if (!type_info.IsSmi()) {
11261 __ test(edx, Immediate(kSmiTagMask));
11262 __ j(not_zero, &arg1_is_object);
11263 } else {
11264 if (FLAG_debug_code) __ AbortIfNotSmi(edx);
11265 }
11266 __ SmiUntag(edx);
11267 __ jmp(&load_arg2);
11268 }
11269
11270 __ bind(&arg1_is_object);
11271
11272 // Get the untagged integer version of the edx heap number in ecx.
11273 IntegerConvert(masm, edx, type_info, use_sse3, conversion_failure);
11274 __ mov(edx, ecx);
11275
11276 // Here edx has the untagged integer, eax has a Smi or a heap number.
11277 __ bind(&load_arg2);
11278 if (!type_info.IsDouble()) {
11279 // Test if arg2 is a Smi.
11280 if (!type_info.IsSmi()) {
11281 __ test(eax, Immediate(kSmiTagMask));
11282 __ j(not_zero, &arg2_is_object);
11283 } else {
11284 if (FLAG_debug_code) __ AbortIfNotSmi(eax);
11285 }
11286 __ SmiUntag(eax);
11287 __ mov(ecx, eax);
11288 __ jmp(&done);
11289 }
11290
11291 __ bind(&arg2_is_object);
11292
11293 // Get the untagged integer version of the eax heap number in ecx.
11294 IntegerConvert(masm, eax, type_info, use_sse3, conversion_failure);
11295 __ bind(&done);
11296 __ mov(eax, edx);
11297}
11298
11299
11300// Input: edx, eax are the left and right objects of a bit op.
11301// Output: eax, ecx are left and right integers for a bit op.
11302void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
11303 bool use_sse3,
11304 Label* conversion_failure) {
11305 // Check float operands.
11306 Label arg1_is_object, check_undefined_arg1;
11307 Label arg2_is_object, check_undefined_arg2;
11308 Label load_arg2, done;
11309
11310 // Test if arg1 is a Smi.
Leon Clarkee46be812010-01-19 14:06:41 +000011311 __ test(edx, Immediate(kSmiTagMask));
11312 __ j(not_zero, &arg1_is_object);
Steve Block6ded16b2010-05-10 14:33:55 +010011313
Leon Clarkee46be812010-01-19 14:06:41 +000011314 __ SmiUntag(edx);
11315 __ jmp(&load_arg2);
11316
11317 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
11318 __ bind(&check_undefined_arg1);
11319 __ cmp(edx, Factory::undefined_value());
11320 __ j(not_equal, conversion_failure);
11321 __ mov(edx, Immediate(0));
11322 __ jmp(&load_arg2);
11323
11324 __ bind(&arg1_is_object);
11325 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
11326 __ cmp(ebx, Factory::heap_number_map());
11327 __ j(not_equal, &check_undefined_arg1);
Steve Block6ded16b2010-05-10 14:33:55 +010011328
Leon Clarkee46be812010-01-19 14:06:41 +000011329 // Get the untagged integer version of the edx heap number in ecx.
Steve Block6ded16b2010-05-10 14:33:55 +010011330 IntegerConvert(masm,
11331 edx,
11332 TypeInfo::Unknown(),
11333 use_sse3,
11334 conversion_failure);
Leon Clarkee46be812010-01-19 14:06:41 +000011335 __ mov(edx, ecx);
11336
11337 // Here edx has the untagged integer, eax has a Smi or a heap number.
11338 __ bind(&load_arg2);
Steve Block6ded16b2010-05-10 14:33:55 +010011339
Leon Clarkee46be812010-01-19 14:06:41 +000011340 // Test if arg2 is a Smi.
11341 __ test(eax, Immediate(kSmiTagMask));
11342 __ j(not_zero, &arg2_is_object);
Steve Block6ded16b2010-05-10 14:33:55 +010011343
Leon Clarkee46be812010-01-19 14:06:41 +000011344 __ SmiUntag(eax);
11345 __ mov(ecx, eax);
11346 __ jmp(&done);
11347
11348 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
11349 __ bind(&check_undefined_arg2);
11350 __ cmp(eax, Factory::undefined_value());
11351 __ j(not_equal, conversion_failure);
11352 __ mov(ecx, Immediate(0));
11353 __ jmp(&done);
11354
11355 __ bind(&arg2_is_object);
11356 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
11357 __ cmp(ebx, Factory::heap_number_map());
11358 __ j(not_equal, &check_undefined_arg2);
Steve Block6ded16b2010-05-10 14:33:55 +010011359
Leon Clarkee46be812010-01-19 14:06:41 +000011360 // Get the untagged integer version of the eax heap number in ecx.
Steve Block6ded16b2010-05-10 14:33:55 +010011361 IntegerConvert(masm,
11362 eax,
11363 TypeInfo::Unknown(),
11364 use_sse3,
11365 conversion_failure);
Leon Clarkee46be812010-01-19 14:06:41 +000011366 __ bind(&done);
11367 __ mov(eax, edx);
11368}
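// Example of the ECMA-262 section 9.5 behavior implemented above: in the
// expression
//
//   undefined | 0   // evaluates to 0
//
// the undefined operand converts to zero in place, while any other
// non-number operand jumps to conversion_failure and is left to the
// runtime.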
11369
11370
Steve Block6ded16b2010-05-10 14:33:55 +010011371void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
11372 TypeInfo type_info,
11373 bool use_sse3,
11374 Label* conversion_failure) {
11375 if (type_info.IsNumber()) {
11376 LoadNumbersAsIntegers(masm, type_info, use_sse3, conversion_failure);
11377 } else {
11378 LoadUnknownsAsIntegers(masm, use_sse3, conversion_failure);
11379 }
11380}
11381
11382
Steve Blocka7e24c12009-10-30 11:49:00 +000011383void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
11384 Register number) {
11385 Label load_smi, done;
11386
11387 __ test(number, Immediate(kSmiTagMask));
11388 __ j(zero, &load_smi, not_taken);
11389 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
11390 __ jmp(&done);
11391
11392 __ bind(&load_smi);
Leon Clarkee46be812010-01-19 14:06:41 +000011393 __ SmiUntag(number);
Steve Blocka7e24c12009-10-30 11:49:00 +000011394 __ push(number);
11395 __ fild_s(Operand(esp, 0));
11396 __ pop(number);
11397
11398 __ bind(&done);
11399}
11400
11401
Andrei Popescu402d9372010-02-26 13:31:12 +000011402void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
11403 Label load_smi_edx, load_eax, load_smi_eax, done;
11404 // Load operand in edx into xmm0.
11405 __ test(edx, Immediate(kSmiTagMask));
11406 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
11407 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
11408
11409 __ bind(&load_eax);
11410 // Load operand in eax into xmm1.
11411 __ test(eax, Immediate(kSmiTagMask));
11412 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
11413 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
11414 __ jmp(&done);
11415
11416 __ bind(&load_smi_edx);
11417 __ SmiUntag(edx); // Untag smi before converting to float.
11418 __ cvtsi2sd(xmm0, Operand(edx));
11419 __ SmiTag(edx); // Retag smi for heap number overwriting test.
11420 __ jmp(&load_eax);
11421
11422 __ bind(&load_smi_eax);
11423 __ SmiUntag(eax); // Untag smi before converting to float.
11424 __ cvtsi2sd(xmm1, Operand(eax));
11425 __ SmiTag(eax); // Retag smi for heap number overwriting test.
11426
11427 __ bind(&done);
11428}
11429
11430
Leon Clarked91b9f72010-01-27 17:25:45 +000011431void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
Steve Blocka7e24c12009-10-30 11:49:00 +000011432 Label* not_numbers) {
11433 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
11434 // Load operand in edx into xmm0, or branch to not_numbers.
11435 __ test(edx, Immediate(kSmiTagMask));
11436 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
11437 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
11438 __ j(not_equal, not_numbers); // Argument in edx is not a number.
11439 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
11440 __ bind(&load_eax);
11441 // Load operand in eax into xmm1, or branch to not_numbers.
11442 __ test(eax, Immediate(kSmiTagMask));
11443 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
11444 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
11445 __ j(equal, &load_float_eax);
11446 __ jmp(not_numbers); // Argument in eax is not a number.
11447 __ bind(&load_smi_edx);
Leon Clarkee46be812010-01-19 14:06:41 +000011448 __ SmiUntag(edx); // Untag smi before converting to float.
Steve Blocka7e24c12009-10-30 11:49:00 +000011449 __ cvtsi2sd(xmm0, Operand(edx));
Leon Clarkee46be812010-01-19 14:06:41 +000011450 __ SmiTag(edx); // Retag smi for heap number overwriting test.
Steve Blocka7e24c12009-10-30 11:49:00 +000011451 __ jmp(&load_eax);
11452 __ bind(&load_smi_eax);
Leon Clarkee46be812010-01-19 14:06:41 +000011453 __ SmiUntag(eax); // Untag smi before converting to float.
Steve Blocka7e24c12009-10-30 11:49:00 +000011454 __ cvtsi2sd(xmm1, Operand(eax));
Leon Clarkee46be812010-01-19 14:06:41 +000011455 __ SmiTag(eax); // Retag smi for heap number overwriting test.
Steve Blocka7e24c12009-10-30 11:49:00 +000011456 __ jmp(&done);
11457 __ bind(&load_float_eax);
11458 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
11459 __ bind(&done);
11460}
11461
11462
Leon Clarked91b9f72010-01-27 17:25:45 +000011463void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
11464 Register scratch) {
11465 const Register left = edx;
11466 const Register right = eax;
11467 __ mov(scratch, left);
11468 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
11469 __ SmiUntag(scratch);
11470 __ cvtsi2sd(xmm0, Operand(scratch));
11471
11472 __ mov(scratch, right);
11473 __ SmiUntag(scratch);
11474 __ cvtsi2sd(xmm1, Operand(scratch));
11475}
11476
11477
Steve Blocka7e24c12009-10-30 11:49:00 +000011478void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
Leon Clarked91b9f72010-01-27 17:25:45 +000011479 Register scratch,
11480 ArgLocation arg_location) {
Steve Blocka7e24c12009-10-30 11:49:00 +000011481 Label load_smi_1, load_smi_2, done_load_1, done;
Leon Clarked91b9f72010-01-27 17:25:45 +000011482 if (arg_location == ARGS_IN_REGISTERS) {
11483 __ mov(scratch, edx);
11484 } else {
11485 __ mov(scratch, Operand(esp, 2 * kPointerSize));
11486 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011487 __ test(scratch, Immediate(kSmiTagMask));
11488 __ j(zero, &load_smi_1, not_taken);
11489 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
11490 __ bind(&done_load_1);
11491
Leon Clarked91b9f72010-01-27 17:25:45 +000011492 if (arg_location == ARGS_IN_REGISTERS) {
11493 __ mov(scratch, eax);
11494 } else {
11495 __ mov(scratch, Operand(esp, 1 * kPointerSize));
11496 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011497 __ test(scratch, Immediate(kSmiTagMask));
11498 __ j(zero, &load_smi_2, not_taken);
11499 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
11500 __ jmp(&done);
11501
11502 __ bind(&load_smi_1);
Leon Clarkee46be812010-01-19 14:06:41 +000011503 __ SmiUntag(scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +000011504 __ push(scratch);
11505 __ fild_s(Operand(esp, 0));
11506 __ pop(scratch);
11507 __ jmp(&done_load_1);
11508
11509 __ bind(&load_smi_2);
Leon Clarkee46be812010-01-19 14:06:41 +000011510 __ SmiUntag(scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +000011511 __ push(scratch);
11512 __ fild_s(Operand(esp, 0));
11513 __ pop(scratch);
11514
11515 __ bind(&done);
11516}
11517
11518
Leon Clarked91b9f72010-01-27 17:25:45 +000011519void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
11520 Register scratch) {
11521 const Register left = edx;
11522 const Register right = eax;
11523 __ mov(scratch, left);
11524 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
11525 __ SmiUntag(scratch);
11526 __ push(scratch);
11527 __ fild_s(Operand(esp, 0));
11528
11529 __ mov(scratch, right);
11530 __ SmiUntag(scratch);
11531 __ mov(Operand(esp, 0), scratch);
11532 __ fild_s(Operand(esp, 0));
11533 __ pop(scratch);
11534}
11535
11536
Steve Blocka7e24c12009-10-30 11:49:00 +000011537void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
11538 Label* non_float,
11539 Register scratch) {
11540 Label test_other, done;
11541 // Test if both operands are floats or smi -> scratch=k_is_float;
11542 // Otherwise scratch = k_not_float.
11543 __ test(edx, Immediate(kSmiTagMask));
11544 __ j(zero, &test_other, not_taken); // argument in edx is OK
11545 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
11546 __ cmp(scratch, Factory::heap_number_map());
11547 __ j(not_equal, non_float); // argument in edx is not a number -> NaN
11548
11549 __ bind(&test_other);
11550 __ test(eax, Immediate(kSmiTagMask));
11551 __ j(zero, &done); // argument in eax is OK
11552 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
11553 __ cmp(scratch, Factory::heap_number_map());
11554 __ j(not_equal, non_float); // argument in eax is not a number -> NaN
11555
11556 // Fall-through: Both operands are numbers.
11557 __ bind(&done);
11558}
11559
11560
Leon Clarkee46be812010-01-19 14:06:41 +000011561void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
11562 Label slow, done;
Steve Blocka7e24c12009-10-30 11:49:00 +000011563
Leon Clarkee46be812010-01-19 14:06:41 +000011564 if (op_ == Token::SUB) {
11565 // Check whether the value is a smi.
11566 Label try_float;
11567 __ test(eax, Immediate(kSmiTagMask));
11568 __ j(not_zero, &try_float, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +000011569
Leon Clarkeac952652010-07-15 11:15:24 +010011570 if (negative_zero_ == kStrictNegativeZero) {
11571 // Go slow case if the value of the expression is zero
11572 // to make sure that we switch between 0 and -0.
11573 __ test(eax, Operand(eax));
11574 __ j(zero, &slow, not_taken);
11575 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011576
Leon Clarkee46be812010-01-19 14:06:41 +000011577 // The value of the expression is a smi that is not zero. Try
11578 // optimistic subtraction '0 - value'.
11579 Label undo;
Steve Blocka7e24c12009-10-30 11:49:00 +000011580 __ mov(edx, Operand(eax));
Leon Clarkee46be812010-01-19 14:06:41 +000011581 __ Set(eax, Immediate(0));
11582 __ sub(eax, Operand(edx));
Leon Clarkeac952652010-07-15 11:15:24 +010011583 __ j(no_overflow, &done, taken);
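// (The optimistic negation above can only overflow for the most negative
// smi: its tagged form is 0x80000000, and 0 - 0x80000000 overflows 32-bit
// signed arithmetic, so exactly that case falls through to &undo.)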
Leon Clarkee46be812010-01-19 14:06:41 +000011584
11585 // Restore eax and go slow case.
11586 __ bind(&undo);
11587 __ mov(eax, Operand(edx));
11588 __ jmp(&slow);
11589
11590 // Try floating point case.
11591 __ bind(&try_float);
11592 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
11593 __ cmp(edx, Factory::heap_number_map());
11594 __ j(not_equal, &slow);
Leon Clarkeac952652010-07-15 11:15:24 +010011595 if (overwrite_ == UNARY_OVERWRITE) {
Leon Clarkee46be812010-01-19 14:06:41 +000011596 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
11597 __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
11598 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
11599 } else {
11600 __ mov(edx, Operand(eax));
11601 // edx: operand
11602 __ AllocateHeapNumber(eax, ebx, ecx, &undo);
11603 // eax: allocated 'empty' number
11604 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
11605 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
11606 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
11607 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
11608 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
11609 }
11610 } else if (op_ == Token::BIT_NOT) {
11611 // Check if the operand is a heap number.
11612 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
11613 __ cmp(edx, Factory::heap_number_map());
11614 __ j(not_equal, &slow, not_taken);
11615
11616 // Convert the heap number in eax to an untagged integer in ecx.
Steve Block6ded16b2010-05-10 14:33:55 +010011617 IntegerConvert(masm,
11618 eax,
11619 TypeInfo::Unknown(),
11620 CpuFeatures::IsSupported(SSE3),
11621 &slow);
Leon Clarkee46be812010-01-19 14:06:41 +000011622
11623 // Do the bitwise operation and check if the result fits in a smi.
11624 Label try_float;
11625 __ not_(ecx);
11626 __ cmp(ecx, 0xc0000000);
11627 __ j(sign, &try_float, not_taken);
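// (The cmp above subtracts 0xc0000000, which sets the sign flag exactly
// for values in [0x40000000, 0xbfffffff], i.e. results outside the smi
// range [-2^30, 2^30) that must be boxed as heap numbers instead.)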
11628
11629 // Tag the result as a smi and we're done.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011630 STATIC_ASSERT(kSmiTagSize == 1);
Leon Clarkee46be812010-01-19 14:06:41 +000011631 __ lea(eax, Operand(ecx, times_2, kSmiTag));
11632 __ jmp(&done);
11633
11634 // Try to store the result in a heap number.
11635 __ bind(&try_float);
Leon Clarkeac952652010-07-15 11:15:24 +010011636 if (overwrite_ == UNARY_NO_OVERWRITE) {
Leon Clarkee46be812010-01-19 14:06:41 +000011637 // Allocate a fresh heap number, but don't overwrite eax until
11638 // we're sure we can do it without going through the slow case
11639 // that needs the value in eax.
11640 __ AllocateHeapNumber(ebx, edx, edi, &slow);
11641 __ mov(eax, Operand(ebx));
11642 }
11643 if (CpuFeatures::IsSupported(SSE2)) {
11644 CpuFeatures::Scope use_sse2(SSE2);
11645 __ cvtsi2sd(xmm0, Operand(ecx));
11646 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
11647 } else {
11648 __ push(ecx);
11649 __ fild_s(Operand(esp, 0));
11650 __ pop(ecx);
11651 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
11652 }
11653 } else {
11654 UNIMPLEMENTED();
Steve Blocka7e24c12009-10-30 11:49:00 +000011655 }
11656
Leon Clarkee46be812010-01-19 14:06:41 +000011657 // Return from the stub.
Steve Blocka7e24c12009-10-30 11:49:00 +000011658 __ bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +000011659 __ StubReturn(1);
Leon Clarkee46be812010-01-19 14:06:41 +000011660
11661 // Handle the slow case by jumping to the JavaScript builtin.
11662 __ bind(&slow);
11663 __ pop(ecx); // pop return address.
11664 __ push(eax);
11665 __ push(ecx); // push return address
11666 switch (op_) {
11667 case Token::SUB:
11668 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
11669 break;
11670 case Token::BIT_NOT:
11671 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
11672 break;
11673 default:
11674 UNREACHABLE();
11675 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011676}
11677
11678
Steve Blocka7e24c12009-10-30 11:49:00 +000011679void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
11680 // The key is in edx and the parameter count is in eax.
11681
11682 // The displacement is used for skipping the frame pointer on the
11683 // stack. It is the offset of the last parameter (if any) relative
11684 // to the frame pointer.
11685 static const int kDisplacement = 1 * kPointerSize;
11686
11687 // Check that the key is a smi.
11688 Label slow;
11689 __ test(edx, Immediate(kSmiTagMask));
11690 __ j(not_zero, &slow, not_taken);
11691
11692 // Check if the calling frame is an arguments adaptor frame.
11693 Label adaptor;
11694 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
11695 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
11696 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
11697 __ j(equal, &adaptor);
11698
11699 // Check index against formal parameters count limit passed in
11700 // through register eax. Use unsigned comparison to get negative
11701 // check for free.
11702 __ cmp(edx, Operand(eax));
11703 __ j(above_equal, &slow, not_taken);
11704
11705 // Read the argument from the stack and return it.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011706 STATIC_ASSERT(kSmiTagSize == 1);
11707 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
Steve Blocka7e24c12009-10-30 11:49:00 +000011708 __ lea(ebx, Operand(ebp, eax, times_2, 0));
11709 __ neg(edx);
11710 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
11711 __ ret(0);
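// Address computed above, in C-like terms: the element is read from
//
//   ebp + kDisplacement + (parameter_count - key) * kPointerSize
//
// where eax (count) and edx (key) are smis, so the times_2 scale factor
// turns each tagged value (value << 1) into value * kPointerSize.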
11712
11713 // Arguments adaptor case: Check index against actual arguments
11714 // limit found in the arguments adaptor frame. Use unsigned
11715 // comparison to get negative check for free.
11716 __ bind(&adaptor);
11717 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
11718 __ cmp(edx, Operand(ecx));
11719 __ j(above_equal, &slow, not_taken);
11720
11721 // Read the argument from the stack and return it.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011722 STATIC_ASSERT(kSmiTagSize == 1);
11723 STATIC_ASSERT(kSmiTag == 0); // Shifting code depends on these.
Steve Blocka7e24c12009-10-30 11:49:00 +000011724 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
11725 __ neg(edx);
11726 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
11727 __ ret(0);
11728
11729 // Slow-case: Handle non-smi or out-of-bounds access to arguments
11730 // by calling the runtime system.
11731 __ bind(&slow);
11732 __ pop(ebx); // Return address.
11733 __ push(edx);
11734 __ push(ebx);
Steve Block6ded16b2010-05-10 14:33:55 +010011735 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +000011736}
11737
11738
11739void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
Andrei Popescu402d9372010-02-26 13:31:12 +000011740 // esp[0] : return address
11741 // esp[4] : number of parameters
11742 // esp[8] : receiver displacement
11743 // esp[12] : function
11744
Steve Blocka7e24c12009-10-30 11:49:00 +000011745 // The displacement is used for skipping the return address and the
11746 // frame pointer on the stack. It is the offset of the last
11747 // parameter (if any) relative to the frame pointer.
11748 static const int kDisplacement = 2 * kPointerSize;
11749
11750 // Check if the calling frame is an arguments adaptor frame.
Leon Clarkee46be812010-01-19 14:06:41 +000011751 Label adaptor_frame, try_allocate, runtime;
Steve Blocka7e24c12009-10-30 11:49:00 +000011752 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
11753 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
11754 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Leon Clarkee46be812010-01-19 14:06:41 +000011755 __ j(equal, &adaptor_frame);
11756
11757 // Get the length from the frame.
11758 __ mov(ecx, Operand(esp, 1 * kPointerSize));
11759 __ jmp(&try_allocate);
Steve Blocka7e24c12009-10-30 11:49:00 +000011760
11761 // Patch the arguments.length and the parameters pointer.
Leon Clarkee46be812010-01-19 14:06:41 +000011762 __ bind(&adaptor_frame);
Steve Blocka7e24c12009-10-30 11:49:00 +000011763 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
11764 __ mov(Operand(esp, 1 * kPointerSize), ecx);
11765 __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
11766 __ mov(Operand(esp, 2 * kPointerSize), edx);
11767
Leon Clarkee46be812010-01-19 14:06:41 +000011768 // Try the new space allocation. Start out with computing the size of
11769 // the arguments object and the elements array.
11770 Label add_arguments_object;
11771 __ bind(&try_allocate);
11772 __ test(ecx, Operand(ecx));
11773 __ j(zero, &add_arguments_object);
11774 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
11775 __ bind(&add_arguments_object);
11776 __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));
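// Size computed above, spelled out: ecx arrives as a smi (length << 1),
// so after the lea it holds
//
//   FixedArray::kHeaderSize + length * kPointerSize
//
// and the add folds in the arguments object itself; a zero length skips
// the elements array entirely.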
11777
11778 // Do the allocation of both objects in one go.
11779 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
11780
11781 // Get the arguments boilerplate from the current (global) context.
11782 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
11783 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
11784 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
11785 __ mov(edi, Operand(edi, offset));
11786
11787 // Copy the JS object part.
11788 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
11789 __ mov(ebx, FieldOperand(edi, i));
11790 __ mov(FieldOperand(eax, i), ebx);
11791 }
11792
11793 // Setup the callee in-object property.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011794 STATIC_ASSERT(Heap::arguments_callee_index == 0);
Leon Clarkee46be812010-01-19 14:06:41 +000011795 __ mov(ebx, Operand(esp, 3 * kPointerSize));
11796 __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);
11797
11798 // Get the length (smi tagged) and set that as an in-object property too.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011799 STATIC_ASSERT(Heap::arguments_length_index == 1);
Leon Clarkee46be812010-01-19 14:06:41 +000011800 __ mov(ecx, Operand(esp, 1 * kPointerSize));
11801 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);
11802
11803 // If there are no actual arguments, we're done.
11804 Label done;
11805 __ test(ecx, Operand(ecx));
11806 __ j(zero, &done);
11807
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010011808 // Get the parameters pointer from the stack.
Leon Clarkee46be812010-01-19 14:06:41 +000011809 __ mov(edx, Operand(esp, 2 * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +000011810
11811 // Setup the elements pointer in the allocated arguments object and
11812 // initialize the header in the elements fixed array.
11813 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
11814 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
11815 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
11816 Immediate(Factory::fixed_array_map()));
11817 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010011818 // Untag the length for the loop below.
11819 __ SmiUntag(ecx);
Leon Clarkee46be812010-01-19 14:06:41 +000011820
11821 // Copy the fixed array slots.
11822 Label loop;
11823 __ bind(&loop);
11824 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
11825 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
11826 __ add(Operand(edi), Immediate(kPointerSize));
11827 __ sub(Operand(edx), Immediate(kPointerSize));
11828 __ dec(ecx);
Leon Clarkee46be812010-01-19 14:06:41 +000011829 __ j(not_zero, &loop);
11830
11831 // Return and remove the on-stack parameters.
11832 __ bind(&done);
11833 __ ret(3 * kPointerSize);
11834
Steve Blocka7e24c12009-10-30 11:49:00 +000011835 // Do the runtime call to allocate the arguments object.
11836 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010011837 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +000011838}
11839
11840
Leon Clarkee46be812010-01-19 14:06:41 +000011841void RegExpExecStub::Generate(MacroAssembler* masm) {
Leon Clarke4515c472010-02-03 11:58:03 +000011842 // Just jump directly to runtime if native RegExp is not selected at compile
11843 // time, or if the regexp entry in generated code has been turned off, either
11844 // by a runtime switch or at compilation.
Steve Block6ded16b2010-05-10 14:33:55 +010011845#ifdef V8_INTERPRETED_REGEXP
11846 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
11847#else // V8_INTERPRETED_REGEXP
Leon Clarkee46be812010-01-19 14:06:41 +000011848 if (!FLAG_regexp_entry_native) {
Steve Block6ded16b2010-05-10 14:33:55 +010011849 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
Leon Clarkee46be812010-01-19 14:06:41 +000011850 return;
11851 }
11852
11853 // Stack frame on entry.
11854 // esp[0]: return address
11855 // esp[4]: last_match_info (expected JSArray)
11856 // esp[8]: previous index
11857 // esp[12]: subject string
11858 // esp[16]: JSRegExp object
11859
Leon Clarked91b9f72010-01-27 17:25:45 +000011860 static const int kLastMatchInfoOffset = 1 * kPointerSize;
11861 static const int kPreviousIndexOffset = 2 * kPointerSize;
11862 static const int kSubjectOffset = 3 * kPointerSize;
11863 static const int kJSRegExpOffset = 4 * kPointerSize;
11864
11865 Label runtime, invoke_regexp;
11866
11867 // Ensure that a RegExp stack is allocated.
11868 ExternalReference address_of_regexp_stack_memory_address =
11869 ExternalReference::address_of_regexp_stack_memory_address();
11870 ExternalReference address_of_regexp_stack_memory_size =
11871 ExternalReference::address_of_regexp_stack_memory_size();
11872 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
11873 __ test(ebx, Operand(ebx));
11874 __ j(zero, &runtime, not_taken);
Leon Clarkee46be812010-01-19 14:06:41 +000011875
11876 // Check that the first argument is a JSRegExp object.
Leon Clarked91b9f72010-01-27 17:25:45 +000011877 __ mov(eax, Operand(esp, kJSRegExpOffset));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011878 STATIC_ASSERT(kSmiTag == 0);
Leon Clarkee46be812010-01-19 14:06:41 +000011879 __ test(eax, Immediate(kSmiTagMask));
11880 __ j(zero, &runtime);
11881 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
11882 __ j(not_equal, &runtime);
11883 // Check that the RegExp has been compiled (data contains a fixed array).
11884 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
Leon Clarke4515c472010-02-03 11:58:03 +000011885 if (FLAG_debug_code) {
11886 __ test(ecx, Immediate(kSmiTagMask));
11887 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
11888 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
11889 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
11890 }
Leon Clarkee46be812010-01-19 14:06:41 +000011891
11892 // ecx: RegExp data (FixedArray)
11893 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
11894 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
11895 __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
11896 __ j(not_equal, &runtime);
11897
11898 // ecx: RegExp data (FixedArray)
11899 // Check that the number of captures fit in the static offsets vector buffer.
11900 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
11901 // Calculate number of capture registers (number_of_captures + 1) * 2. This
11902 // uses the assumption that smis are 2 * their untagged value.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011903 STATIC_ASSERT(kSmiTag == 0);
11904 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Leon Clarkee46be812010-01-19 14:06:41 +000011905 __ add(Operand(edx), Immediate(2)); // edx was a smi.
11906 // Check that the static offsets vector buffer is large enough.
11907 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
11908 __ j(above, &runtime);
11909
11910 // ecx: RegExp data (FixedArray)
11911 // edx: Number of capture registers
11912 // Check that the second argument is a string.
Leon Clarked91b9f72010-01-27 17:25:45 +000011913 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011914 __ test(eax, Immediate(kSmiTagMask));
11915 __ j(zero, &runtime);
11916 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
11917 __ j(NegateCondition(is_string), &runtime);
11918 // Get the length of the string to ebx.
11919 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
11920
Steve Block6ded16b2010-05-10 14:33:55 +010011921 // ebx: Length of subject string as a smi
Leon Clarkee46be812010-01-19 14:06:41 +000011922 // ecx: RegExp data (FixedArray)
11923 // edx: Number of capture registers
Leon Clarke4515c472010-02-03 11:58:03 +000011924 // Check that the third argument is a positive smi less than the subject
Steve Block6ded16b2010-05-10 14:33:55 +010011925 // string length. A negative value will be greater (unsigned comparison).
Leon Clarked91b9f72010-01-27 17:25:45 +000011926 __ mov(eax, Operand(esp, kPreviousIndexOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010011927 __ test(eax, Immediate(kSmiTagMask));
Kristian Monsen25f61362010-05-21 11:50:48 +010011928 __ j(not_zero, &runtime);
Leon Clarkee46be812010-01-19 14:06:41 +000011929 __ cmp(eax, Operand(ebx));
Steve Block6ded16b2010-05-10 14:33:55 +010011930 __ j(above_equal, &runtime);
Leon Clarkee46be812010-01-19 14:06:41 +000011931
11932 // ecx: RegExp data (FixedArray)
11933 // edx: Number of capture registers
11934 // Check that the fourth object is a JSArray object.
Leon Clarked91b9f72010-01-27 17:25:45 +000011935 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011936 __ test(eax, Immediate(kSmiTagMask));
11937 __ j(zero, &runtime);
11938 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
11939 __ j(not_equal, &runtime);
11940 // Check that the JSArray is in fast case.
11941 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
11942 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
11943 __ cmp(eax, Factory::fixed_array_map());
11944 __ j(not_equal, &runtime);
11945 // Check that the last match info has space for the capture registers and the
11946 // additional information.
11947 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010011948 __ SmiUntag(eax);
Leon Clarkee46be812010-01-19 14:06:41 +000011949 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
11950 __ cmp(edx, Operand(eax));
11951 __ j(greater, &runtime);
11952
11953 // ecx: RegExp data (FixedArray)
Leon Clarked91b9f72010-01-27 17:25:45 +000011954 // Check the representation and encoding of the subject string.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011955 Label seq_ascii_string, seq_two_byte_string, check_code;
Leon Clarked91b9f72010-01-27 17:25:45 +000011956 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011957 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
11958 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011959 // First check for flat two byte string.
11960 __ and_(ebx,
11961 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011962 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011963 __ j(zero, &seq_two_byte_string);
11964 // Any other flat string must be a flat ascii string.
Leon Clarked91b9f72010-01-27 17:25:45 +000011965 __ test(Operand(ebx),
11966 Immediate(kIsNotStringMask | kStringRepresentationMask));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011967 __ j(zero, &seq_ascii_string);
Leon Clarked91b9f72010-01-27 17:25:45 +000011968
11969 // Check for flat cons string.
11970 // A flat cons string is a cons string where the second part is the empty
11971 // string. In that case the subject string is just the first part of the cons
11972 // string. Also in this case the first part of the cons string is known to be
Leon Clarke4515c472010-02-03 11:58:03 +000011973 // a sequential string or an external string.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011974 STATIC_ASSERT(kExternalStringTag != 0);
11975 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011976 __ test(Operand(ebx),
11977 Immediate(kIsNotStringMask | kExternalStringTag));
11978 __ j(not_zero, &runtime);
11979 // String is a cons string.
Leon Clarked91b9f72010-01-27 17:25:45 +000011980 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
Leon Clarke4515c472010-02-03 11:58:03 +000011981 __ cmp(Operand(edx), Factory::empty_string());
Leon Clarked91b9f72010-01-27 17:25:45 +000011982 __ j(not_equal, &runtime);
11983 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
11984 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011985 // String is a cons string with empty second part.
11986 // eax: first part of cons string.
11987 // ebx: map of first part of cons string.
11988 // Is first part a flat two byte string?
11989 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
11990 kStringRepresentationMask | kStringEncodingMask);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010011991 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011992 __ j(zero, &seq_two_byte_string);
11993 // Any other flat string must be ascii.
11994 __ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
11995 kStringRepresentationMask);
Leon Clarke4515c472010-02-03 11:58:03 +000011996 __ j(not_zero, &runtime);
Leon Clarkee46be812010-01-19 14:06:41 +000011997
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010011998 __ bind(&seq_ascii_string);
11999 // eax: subject string (flat ascii)
Leon Clarkee46be812010-01-19 14:06:41 +000012000 // ecx: RegExp data (FixedArray)
Leon Clarkee46be812010-01-19 14:06:41 +000012001 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +000012002 __ Set(edi, Immediate(1)); // Type is ascii.
12003 __ jmp(&check_code);
12004
12005 __ bind(&seq_two_byte_string);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010012006 // eax: subject string (flat two byte)
Leon Clarked91b9f72010-01-27 17:25:45 +000012007 // ecx: RegExp data (FixedArray)
12008 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
12009 __ Set(edi, Immediate(0)); // Type is two byte.
12010
12011 __ bind(&check_code);
Leon Clarke4515c472010-02-03 11:58:03 +000012012 // Check that the irregexp code has been generated for the actual string
12013 // encoding. If it has, the field contains a code object; otherwise it contains
12014 // the hole.
Leon Clarkee46be812010-01-19 14:06:41 +000012015 __ CmpObjectType(edx, CODE_TYPE, ebx);
12016 __ j(not_equal, &runtime);
12017
Leon Clarked91b9f72010-01-27 17:25:45 +000012018 // eax: subject string
12019 // edx: code
Leon Clarke4515c472010-02-03 11:58:03 +000012020 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
Leon Clarkee46be812010-01-19 14:06:41 +000012021 // Load used arguments before starting to push arguments for call to native
12022 // RegExp code to avoid handling changing stack height.
Leon Clarked91b9f72010-01-27 17:25:45 +000012023 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +000012024 __ SmiUntag(ebx); // Previous index from smi.
Leon Clarkee46be812010-01-19 14:06:41 +000012025
12026 // eax: subject string
12027 // ebx: previous index
12028 // edx: code
Leon Clarke4515c472010-02-03 11:58:03 +000012029 // edi: encoding of subject string (1 if ascii 0 if two_byte);
Leon Clarkee46be812010-01-19 14:06:41 +000012030 // All checks done. Now push arguments for native regexp code.
12031 __ IncrementCounter(&Counters::regexp_entry_native, 1);
12032
Steve Block6ded16b2010-05-10 14:33:55 +010012033 static const int kRegExpExecuteArguments = 7;
12034 __ PrepareCallCFunction(kRegExpExecuteArguments, ecx);
12035
Leon Clarked91b9f72010-01-27 17:25:45 +000012036 // Argument 7: Indicate that this is a direct call from JavaScript.
Steve Block6ded16b2010-05-10 14:33:55 +010012037 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
Leon Clarkee46be812010-01-19 14:06:41 +000012038
Leon Clarked91b9f72010-01-27 17:25:45 +000012039 // Argument 6: Start (high end) of backtracking stack memory area.
Leon Clarkee46be812010-01-19 14:06:41 +000012040 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
12041 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
Steve Block6ded16b2010-05-10 14:33:55 +010012042 __ mov(Operand(esp, 5 * kPointerSize), ecx);
Leon Clarkee46be812010-01-19 14:06:41 +000012043
Leon Clarkee46be812010-01-19 14:06:41 +000012044 // Argument 5: static offsets vector buffer.
Steve Block6ded16b2010-05-10 14:33:55 +010012045 __ mov(Operand(esp, 4 * kPointerSize),
12046 Immediate(ExternalReference::address_of_static_offsets_vector()));
Leon Clarkee46be812010-01-19 14:06:41 +000012047
Leon Clarked91b9f72010-01-27 17:25:45 +000012048 // Argument 4: End of string data
12049 // Argument 3: Start of string data
Steve Block6ded16b2010-05-10 14:33:55 +010012050 Label setup_two_byte, setup_rest;
Leon Clarked91b9f72010-01-27 17:25:45 +000012051 __ test(edi, Operand(edi));
12052 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010012053 __ j(zero, &setup_two_byte);
12054 __ SmiUntag(edi);
Leon Clarked91b9f72010-01-27 17:25:45 +000012055 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010012056 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
Leon Clarked91b9f72010-01-27 17:25:45 +000012057 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010012058 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
12059 __ jmp(&setup_rest);
Leon Clarkee46be812010-01-19 14:06:41 +000012060
Steve Block6ded16b2010-05-10 14:33:55 +010012061 __ bind(&setup_two_byte);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012062 STATIC_ASSERT(kSmiTag == 0);
12063 STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2).
Steve Block6ded16b2010-05-10 14:33:55 +010012064 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
12065 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
Leon Clarked91b9f72010-01-27 17:25:45 +000012066 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010012067 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
Leon Clarked91b9f72010-01-27 17:25:45 +000012068
Steve Block6ded16b2010-05-10 14:33:55 +010012069 __ bind(&setup_rest);
Leon Clarkee46be812010-01-19 14:06:41 +000012070
12071 // Argument 2: Previous index.
Steve Block6ded16b2010-05-10 14:33:55 +010012072 __ mov(Operand(esp, 1 * kPointerSize), ebx);
Leon Clarkee46be812010-01-19 14:06:41 +000012073
12074 // Argument 1: Subject string.
Steve Block6ded16b2010-05-10 14:33:55 +010012075 __ mov(Operand(esp, 0 * kPointerSize), eax);
Leon Clarkee46be812010-01-19 14:06:41 +000012076
12077 // Locate the code entry and call it.
12078 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
Steve Block6ded16b2010-05-10 14:33:55 +010012079 __ CallCFunction(edx, kRegExpExecuteArguments);
Leon Clarkee46be812010-01-19 14:06:41 +000012080
12081 // Check the result.
12082 Label success;
12083 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
12084 __ j(equal, &success, taken);
12085 Label failure;
12086 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
12087 __ j(equal, &failure, taken);
12088 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
12089 // If not exception it can only be retry. Handle that in the runtime system.
12090 __ j(not_equal, &runtime);
12091 // Result must now be exception. If there is no pending exception already, a
12092 // stack overflow (on the backtrack stack) was detected in RegExp code, but
12093 // the exception has not been created yet. Handle that in the runtime system.
Steve Block6ded16b2010-05-10 14:33:55 +010012094 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Leon Clarkee46be812010-01-19 14:06:41 +000012095 ExternalReference pending_exception(Top::k_pending_exception_address);
12096 __ mov(eax,
12097 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
12098 __ cmp(eax, Operand::StaticVariable(pending_exception));
12099 __ j(equal, &runtime);
12100 __ bind(&failure);
12101 // For failure and exception return null.
12102 __ mov(Operand(eax), Factory::null_value());
12103 __ ret(4 * kPointerSize);
12104
12105 // Load RegExp data.
12106 __ bind(&success);
Leon Clarked91b9f72010-01-27 17:25:45 +000012107 __ mov(eax, Operand(esp, kJSRegExpOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000012108 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
12109 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
12110 // Calculate number of capture registers (number_of_captures + 1) * 2.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012111 STATIC_ASSERT(kSmiTag == 0);
12112 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Leon Clarkee46be812010-01-19 14:06:41 +000012113 __ add(Operand(edx), Immediate(2)); // edx was a smi.
12114
12115 // edx: Number of capture registers
12116 // Load last_match_info which is still known to be a fast case JSArray.
Leon Clarked91b9f72010-01-27 17:25:45 +000012117 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000012118 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
12119
12120 // ebx: last_match_info backing store (FixedArray)
12121 // edx: number of capture registers
12122 // Store the capture count.
12123 __ SmiTag(edx); // Number of capture registers to smi.
12124 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
12125 __ SmiUntag(edx); // Number of capture registers back from smi.
12126 // Store last subject and last input.
Leon Clarked91b9f72010-01-27 17:25:45 +000012127 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000012128 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
12129 __ mov(ecx, ebx);
12130 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
Leon Clarked91b9f72010-01-27 17:25:45 +000012131 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000012132 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
12133 __ mov(ecx, ebx);
12134 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
12135
12136 // Get the static offsets vector filled by the native regexp code.
12137 ExternalReference address_of_static_offsets_vector =
12138 ExternalReference::address_of_static_offsets_vector();
12139 __ mov(ecx, Immediate(address_of_static_offsets_vector));
12140
12141 // ebx: last_match_info backing store (FixedArray)
12142 // ecx: offsets vector
12143 // edx: number of capture registers
12144 Label next_capture, done;
Leon Clarkee46be812010-01-19 14:06:41 +000012145 // Capture register counter starts from number of capture registers and
12146 // counts down until wrapping after zero.
12147 __ bind(&next_capture);
12148 __ sub(Operand(edx), Immediate(1));
12149 __ j(negative, &done);
12150 // Read the value from the static offsets vector buffer.
Leon Clarke4515c472010-02-03 11:58:03 +000012151 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
Steve Block6ded16b2010-05-10 14:33:55 +010012152 __ SmiTag(edi);
Leon Clarkee46be812010-01-19 14:06:41 +000012153 // Store the smi value in the last match info.
12154 __ mov(FieldOperand(ebx,
12155 edx,
12156 times_pointer_size,
12157 RegExpImpl::kFirstCaptureOffset),
12158 edi);
12159 __ jmp(&next_capture);
12160 __ bind(&done);
12161
12162 // Return last match info.
Leon Clarked91b9f72010-01-27 17:25:45 +000012163 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000012164 __ ret(4 * kPointerSize);
12165
12166 // Do the runtime call to execute the regexp.
12167 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010012168 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
12169#endif // V8_INTERPRETED_REGEXP
Leon Clarkee46be812010-01-19 14:06:41 +000012170}
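
// A minimal sketch (not part of the generated stub): why adding 2 to the
// smi-tagged capture count above yields (number_of_captures + 1) * 2
// directly. With kSmiTag == 0 and a tag shift of 1, a smi stores its value
// shifted left by one, so the tagged form of n is 2 * n. The helper name is
// hypothetical, for exposition only.
static inline int CaptureRegisterCount(int smi_tagged_capture_count) {
  // smi_tagged_capture_count == 2 * number_of_captures, so adding 2 gives
  // 2 * number_of_captures + 2 == (number_of_captures + 1) * 2.
  return smi_tagged_capture_count + 2;
}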
12171
12172
Andrei Popescu402d9372010-02-26 13:31:12 +000012173void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
12174 Register object,
12175 Register result,
12176 Register scratch1,
12177 Register scratch2,
12178 bool object_is_smi,
12179 Label* not_found) {
Andrei Popescu402d9372010-02-26 13:31:12 +000012180 // Register usage: result is used as a temporary.
12181 Register number_string_cache = result;
12182 Register mask = scratch1;
12183 Register scratch = scratch2;
12184
12185 // Load the number string cache.
12186 ExternalReference roots_address = ExternalReference::roots_address();
12187 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
12188 __ mov(number_string_cache,
12189 Operand::StaticArray(scratch, times_pointer_size, roots_address));
12190 // Make the hash mask from the length of the number string cache. It
12191 // contains two elements (number and string) for each cache entry.
12192 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010012193 __ shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
Andrei Popescu402d9372010-02-26 13:31:12 +000012194 __ sub(Operand(mask), Immediate(1)); // Make mask.
Steve Block6ded16b2010-05-10 14:33:55 +010012195
Andrei Popescu402d9372010-02-26 13:31:12 +000012196 // Calculate the entry in the number string cache. The hash value in the
Steve Block6ded16b2010-05-10 14:33:55 +010012197 // number string cache for smis is just the smi value, and the hash for
12198 // doubles is the xor of the upper and lower words. See
12199 // Heap::GetNumberStringCache.
12200 Label smi_hash_calculated;
12201 Label load_result_from_cache;
12202 if (object_is_smi) {
12203 __ mov(scratch, object);
12204 __ SmiUntag(scratch);
12205 } else {
12206 Label not_smi, hash_calculated;
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012207 STATIC_ASSERT(kSmiTag == 0);
Steve Block6ded16b2010-05-10 14:33:55 +010012208 __ test(object, Immediate(kSmiTagMask));
12209 __ j(not_zero, &not_smi);
12210 __ mov(scratch, object);
12211 __ SmiUntag(scratch);
12212 __ jmp(&smi_hash_calculated);
12213 __ bind(&not_smi);
12214 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
12215 Factory::heap_number_map());
12216 __ j(not_equal, not_found);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012217 STATIC_ASSERT(8 == kDoubleSize);
Steve Block6ded16b2010-05-10 14:33:55 +010012218 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
12219 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
12220 // Object is heap number and hash is now in scratch. Calculate cache index.
12221 __ and_(scratch, Operand(mask));
12222 Register index = scratch;
12223 Register probe = mask;
12224 __ mov(probe,
12225 FieldOperand(number_string_cache,
12226 index,
12227 times_twice_pointer_size,
12228 FixedArray::kHeaderSize));
12229 __ test(probe, Immediate(kSmiTagMask));
12230 __ j(zero, not_found);
12231 if (CpuFeatures::IsSupported(SSE2)) {
12232 CpuFeatures::Scope fscope(SSE2);
12233 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
12234 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010012235 __ ucomisd(xmm0, xmm1);
Steve Block6ded16b2010-05-10 14:33:55 +010012236 } else {
12237 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
12238 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
12239 __ FCmp();
12240 }
12241 __ j(parity_even, not_found); // Bail out if NaN is involved.
12242 __ j(not_equal, not_found); // The cache did not contain this value.
12243 __ jmp(&load_result_from_cache);
12244 }
12245
12246 __ bind(&smi_hash_calculated);
12247 // Object is smi and hash is now in scratch. Calculate cache index.
Andrei Popescu402d9372010-02-26 13:31:12 +000012248 __ and_(scratch, Operand(mask));
Steve Block6ded16b2010-05-10 14:33:55 +010012249 Register index = scratch;
Andrei Popescu402d9372010-02-26 13:31:12 +000012250 // Check if the entry is the smi we are looking for.
12251 __ cmp(object,
12252 FieldOperand(number_string_cache,
Steve Block6ded16b2010-05-10 14:33:55 +010012253 index,
Andrei Popescu402d9372010-02-26 13:31:12 +000012254 times_twice_pointer_size,
12255 FixedArray::kHeaderSize));
12256 __ j(not_equal, not_found);
12257
12258 // Get the result from the cache.
Steve Block6ded16b2010-05-10 14:33:55 +010012259 __ bind(&load_result_from_cache);
Andrei Popescu402d9372010-02-26 13:31:12 +000012260 __ mov(result,
12261 FieldOperand(number_string_cache,
Steve Block6ded16b2010-05-10 14:33:55 +010012262 index,
Andrei Popescu402d9372010-02-26 13:31:12 +000012263 times_twice_pointer_size,
12264 FixedArray::kHeaderSize + kPointerSize));
12265 __ IncrementCounter(&Counters::number_to_string_native, 1);
12266}
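
// A minimal sketch (not part of the generated stub): the cache index
// computed above, in C++ terms. Smis hash to their untagged value; heap
// numbers hash to the xor of the two 32-bit halves of the double, as in
// Heap::GetNumberStringCache. The helper is hypothetical and assumes
// <stdint.h>/<string.h> declarations are available.
static inline uint32_t NumberStringCacheIndex(double value, uint32_t mask) {
  uint64_t bits;
  memcpy(&bits, &value, sizeof(bits));  // Reinterpret the IEEE-754 bits.
  uint32_t lower = static_cast<uint32_t>(bits);
  uint32_t upper = static_cast<uint32_t>(bits >> 32);
  return (lower ^ upper) & mask;  // mask == number of cache entries - 1.
}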
12267
12268
12269void NumberToStringStub::Generate(MacroAssembler* masm) {
12270 Label runtime;
12271
12272 __ mov(ebx, Operand(esp, kPointerSize));
12273
12274 // Generate code to lookup number in the number string cache.
12275 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
12276 __ ret(1 * kPointerSize);
12277
12278 __ bind(&runtime);
12279 // Handle number to string in the runtime system if not found in the cache.
Steve Block6ded16b2010-05-10 14:33:55 +010012280 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
12281}
12282
12283
Steve Block6ded16b2010-05-10 14:33:55 +010012284static int NegativeComparisonResult(Condition cc) {
12285 ASSERT(cc != equal);
12286 ASSERT((cc == less) || (cc == less_equal)
12287 || (cc == greater) || (cc == greater_equal));
12288 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
Andrei Popescu402d9372010-02-26 13:31:12 +000012289}
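
// A minimal sketch (not part of the stub): why NegativeComparisonResult is
// correct. A comparison builtin returns a tagged integer that the caller
// interprets roughly as below (hypothetical helper, for exposition only).
static inline bool InterpretComparisonResult(int result, Condition cc) {
  switch (cc) {
    case less:          return result < 0;
    case less_equal:    return result <= 0;
    case greater:       return result > 0;
    case greater_equal: return result >= 0;
    default:            return result == 0;  // equal; others omitted here.
  }
}
// Returning GREATER for cc == less/less_equal and LESS for cc == greater/
// greater_equal therefore makes the comparison evaluate to false, which is
// the required outcome when NaN or undefined is involved.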
12290
12291
Steve Blocka7e24c12009-10-30 11:49:00 +000012292void CompareStub::Generate(MacroAssembler* masm) {
Ben Murdoch3bec4d22010-07-22 14:51:16 +010012293 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
12294
Steve Block8defd9f2010-07-08 12:39:36 +010012295 Label check_unequal_objects, done;
Steve Blocka7e24c12009-10-30 11:49:00 +000012296
12297 // NOTICE! This code is only reached after a smi-fast-case check, so
12298 // it is certain that at least one operand isn't a smi.
12299
Steve Block6ded16b2010-05-10 14:33:55 +010012300 // Identical objects can be compared fast, but there are some tricky cases
12301 // for NaN and undefined.
12302 {
12303 Label not_identical;
12304 __ cmp(eax, Operand(edx));
12305 __ j(not_equal, &not_identical);
Steve Blocka7e24c12009-10-30 11:49:00 +000012306
Steve Block6ded16b2010-05-10 14:33:55 +010012307 if (cc_ != equal) {
12308 // Check for undefined. undefined OP undefined is false even though
12309 // undefined == undefined.
12310 Label check_for_nan;
12311 __ cmp(edx, Factory::undefined_value());
12312 __ j(not_equal, &check_for_nan);
12313 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
12314 __ ret(0);
12315 __ bind(&check_for_nan);
12316 }
Steve Blocka7e24c12009-10-30 11:49:00 +000012317
Steve Block6ded16b2010-05-10 14:33:55 +010012318 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
12319 // so we do the second-best thing: test it ourselves.
12320 // Note: if cc_ != equal, never_nan_nan_ is not used.
12321 if (never_nan_nan_ && (cc_ == equal)) {
12322 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
12323 __ ret(0);
12324 } else {
Steve Block6ded16b2010-05-10 14:33:55 +010012325 Label heap_number;
Steve Block6ded16b2010-05-10 14:33:55 +010012326 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
12327 Immediate(Factory::heap_number_map()));
12328 __ j(equal, &heap_number);
Steve Block8defd9f2010-07-08 12:39:36 +010012329 if (cc_ != equal) {
12330 // Call runtime on identical JSObjects. Otherwise return equal.
12331 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
12332 __ j(above_equal, &not_identical);
12333 }
Steve Block6ded16b2010-05-10 14:33:55 +010012334 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
12335 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012336
Steve Block6ded16b2010-05-10 14:33:55 +010012337 __ bind(&heap_number);
12338 // It is a heap number, so return non-equal if it's NaN and equal if
12339 // it's not NaN.
12340 // The representation of NaN values has all exponent bits (52..62) set,
12341 // and not all mantissa bits (0..51) clear.
12342 // We only accept QNaNs, which have bit 51 set.
12343 // Read top bits of double representation (second word of value).
12344
12345 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
12346 // all bits in the mask are set. We only need to check the word
12347 // that contains the exponent and high bit of the mantissa.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012348 STATIC_ASSERT(((kQuietNaNHighBitsMask << 1) & 0x80000000u) != 0);
Steve Block6ded16b2010-05-10 14:33:55 +010012349 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
12350 __ xor_(eax, Operand(eax));
12351 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
12352 // bits.
12353 __ add(edx, Operand(edx));
12354 __ cmp(edx, kQuietNaNHighBitsMask << 1);
12355 if (cc_ == equal) {
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012356 STATIC_ASSERT(EQUAL != 1);
Leon Clarkee46be812010-01-19 14:06:41 +000012357 __ setcc(above_equal, eax);
12358 __ ret(0);
Steve Block6ded16b2010-05-10 14:33:55 +010012359 } else {
12360 Label nan;
12361 __ j(above_equal, &nan);
12362 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
12363 __ ret(0);
12364 __ bind(&nan);
12365 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
12366 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +000012367 }
Steve Blocka7e24c12009-10-30 11:49:00 +000012368 }
12369
Steve Block6ded16b2010-05-10 14:33:55 +010012370 __ bind(&not_identical);
12371 }
12372
Steve Block8defd9f2010-07-08 12:39:36 +010012373 // Strict equality can quickly decide whether objects are equal.
12374 // Non-strict object equality is slower, so it is handled later in the stub.
12375 if (cc_ == equal && strict_) {
Steve Block6ded16b2010-05-10 14:33:55 +010012376 Label slow; // Fallthrough label.
Steve Block8defd9f2010-07-08 12:39:36 +010012377 Label not_smis;
Steve Blocka7e24c12009-10-30 11:49:00 +000012378 // If we're doing a strict equality comparison, we don't have to do
12379 // type conversion, so we generate code to do fast comparison for objects
12380 // and oddballs. Non-smi numbers and strings still go through the usual
12381 // slow-case code.
Steve Block8defd9f2010-07-08 12:39:36 +010012382 // If either is a Smi (we know that not both are), then they can only
12383 // be equal if the other is a HeapNumber. If so, use the slow case.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012384 STATIC_ASSERT(kSmiTag == 0);
Steve Block8defd9f2010-07-08 12:39:36 +010012385 ASSERT_EQ(0, Smi::FromInt(0));
12386 __ mov(ecx, Immediate(kSmiTagMask));
12387 __ and_(ecx, Operand(eax));
12388 __ test(ecx, Operand(edx));
12389 __ j(not_zero, &not_smis);
12390 // One operand is a smi.
Steve Blocka7e24c12009-10-30 11:49:00 +000012391
Steve Block8defd9f2010-07-08 12:39:36 +010012392 // Check whether the non-smi is a heap number.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012393 STATIC_ASSERT(kSmiTagMask == 1);
Steve Block8defd9f2010-07-08 12:39:36 +010012394 // ecx still holds eax & kSmiTagMask, which is either zero or one.
12395 __ sub(Operand(ecx), Immediate(0x01));
12396 __ mov(ebx, edx);
12397 __ xor_(ebx, Operand(eax));
12398 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
12399 __ xor_(ebx, Operand(eax));
12400 // if eax was smi, ebx is now edx, else eax.
Steve Blocka7e24c12009-10-30 11:49:00 +000012401
Steve Block8defd9f2010-07-08 12:39:36 +010012402 // Check if the non-smi operand is a heap number.
12403 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
12404 Immediate(Factory::heap_number_map()));
12405 // If heap number, handle it in the slow case.
12406 __ j(equal, &slow);
12407 // Return non-equal (ebx is not zero)
12408 __ mov(eax, ebx);
12409 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012410
Steve Block8defd9f2010-07-08 12:39:36 +010012411 __ bind(&not_smis);
12412 // If either operand is a JSObject or an oddball value, then they are not
12413 // equal since their pointers are different.
12414 // There is no test for undetectability in strict equality.
Steve Blocka7e24c12009-10-30 11:49:00 +000012415
Steve Block8defd9f2010-07-08 12:39:36 +010012416 // Get the type of the first operand.
12417 // If the first object is a JS object, we have done pointer comparison.
12418 Label first_non_object;
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012419 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Steve Block8defd9f2010-07-08 12:39:36 +010012420 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
12421 __ j(below, &first_non_object);
Steve Blocka7e24c12009-10-30 11:49:00 +000012422
Steve Block8defd9f2010-07-08 12:39:36 +010012423 // Return non-zero (eax is not zero)
12424 Label return_not_equal;
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012425 STATIC_ASSERT(kHeapObjectTag != 0);
Steve Block8defd9f2010-07-08 12:39:36 +010012426 __ bind(&return_not_equal);
12427 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012428
Steve Block8defd9f2010-07-08 12:39:36 +010012429 __ bind(&first_non_object);
12430 // Check for oddballs: true, false, null, undefined.
12431 __ CmpInstanceType(ecx, ODDBALL_TYPE);
12432 __ j(equal, &return_not_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +000012433
Steve Block8defd9f2010-07-08 12:39:36 +010012434 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ecx);
12435 __ j(above_equal, &return_not_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +000012436
Steve Block8defd9f2010-07-08 12:39:36 +010012437 // Check for oddballs: true, false, null, undefined.
12438 __ CmpInstanceType(ecx, ODDBALL_TYPE);
12439 __ j(equal, &return_not_equal);
Steve Blocka7e24c12009-10-30 11:49:00 +000012440
Steve Block8defd9f2010-07-08 12:39:36 +010012441 // Fall through to the general case.
Steve Blocka7e24c12009-10-30 11:49:00 +000012442 __ bind(&slow);
12443 }
12444
Steve Block6ded16b2010-05-10 14:33:55 +010012445 // Generate the number comparison code.
12446 if (include_number_compare_) {
12447 Label non_number_comparison;
12448 Label unordered;
12449 if (CpuFeatures::IsSupported(SSE2)) {
12450 CpuFeatures::Scope use_sse2(SSE2);
12451 CpuFeatures::Scope use_cmov(CMOV);
Steve Blocka7e24c12009-10-30 11:49:00 +000012452
Steve Block6ded16b2010-05-10 14:33:55 +010012453 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010012454 __ ucomisd(xmm0, xmm1);
Steve Blocka7e24c12009-10-30 11:49:00 +000012455
Steve Block6ded16b2010-05-10 14:33:55 +010012456 // Don't base result on EFLAGS when a NaN is involved.
12457 __ j(parity_even, &unordered, not_taken);
12458 // Return a result of -1, 0, or 1, based on EFLAGS.
12459 __ mov(eax, 0); // equal
12460 __ mov(ecx, Immediate(Smi::FromInt(1)));
12461 __ cmov(above, eax, Operand(ecx));
12462 __ mov(ecx, Immediate(Smi::FromInt(-1)));
12463 __ cmov(below, eax, Operand(ecx));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012464 __ ret(0);
Steve Block6ded16b2010-05-10 14:33:55 +010012465 } else {
12466 FloatingPointHelper::CheckFloatOperands(
12467 masm, &non_number_comparison, ebx);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012468 FloatingPointHelper::LoadFloatOperand(masm, eax);
12469 FloatingPointHelper::LoadFloatOperand(masm, edx);
Steve Block6ded16b2010-05-10 14:33:55 +010012470 __ FCmp();
Steve Blocka7e24c12009-10-30 11:49:00 +000012471
Steve Block6ded16b2010-05-10 14:33:55 +010012472 // Don't base result on EFLAGS when a NaN is involved.
12473 __ j(parity_even, &unordered, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +000012474
Steve Block6ded16b2010-05-10 14:33:55 +010012475 Label below_label, above_label;
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012476 // Return a result of -1, 0, or 1, based on EFLAGS.
Steve Block6ded16b2010-05-10 14:33:55 +010012477 __ j(below, &below_label, not_taken);
12478 __ j(above, &above_label, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +000012479
Steve Block6ded16b2010-05-10 14:33:55 +010012480 __ xor_(eax, Operand(eax));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012481 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012482
Steve Block6ded16b2010-05-10 14:33:55 +010012483 __ bind(&below_label);
12484 __ mov(eax, Immediate(Smi::FromInt(-1)));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012485 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012486
Steve Block6ded16b2010-05-10 14:33:55 +010012487 __ bind(&above_label);
12488 __ mov(eax, Immediate(Smi::FromInt(1)));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012489 __ ret(0);
Steve Block6ded16b2010-05-10 14:33:55 +010012490 }
12491
12492 // If one of the numbers was NaN, then the result is always false.
12493 // The cc is never not-equal.
12494 __ bind(&unordered);
12495 ASSERT(cc_ != not_equal);
12496 if (cc_ == less || cc_ == less_equal) {
12497 __ mov(eax, Immediate(Smi::FromInt(1)));
12498 } else {
12499 __ mov(eax, Immediate(Smi::FromInt(-1)));
12500 }
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012501 __ ret(0);
Steve Block6ded16b2010-05-10 14:33:55 +010012502
12503 // The number comparison code did not provide a valid result.
12504 __ bind(&non_number_comparison);
Steve Blocka7e24c12009-10-30 11:49:00 +000012505 }
Steve Blocka7e24c12009-10-30 11:49:00 +000012506
12507 // Fast negative check for symbol-to-symbol equality.
Leon Clarkee46be812010-01-19 14:06:41 +000012508 Label check_for_strings;
Steve Blocka7e24c12009-10-30 11:49:00 +000012509 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +000012510 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
12511 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +000012512
12513 // We've already checked for object identity, so if both operands
12514 // are symbols they aren't equal. Register eax already holds a
12515 // non-zero value, which indicates not equal, so just return.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012516 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012517 }
12518
Leon Clarkee46be812010-01-19 14:06:41 +000012519 __ bind(&check_for_strings);
12520
Steve Block8defd9f2010-07-08 12:39:36 +010012521 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
12522 &check_unequal_objects);
Leon Clarkee46be812010-01-19 14:06:41 +000012523
12524 // Inline comparison of ASCII strings.
12525 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
12526 edx,
12527 eax,
12528 ecx,
12529 ebx,
12530 edi);
12531#ifdef DEBUG
12532 __ Abort("Unexpected fall-through from string comparison");
12533#endif
12534
Steve Block8defd9f2010-07-08 12:39:36 +010012535 __ bind(&check_unequal_objects);
12536 if (cc_ == equal && !strict_) {
12537 // Non-strict equality. Objects are unequal if
12538 // they are both JSObjects and not undetectable,
12539 // and their pointers are different.
12540 Label not_both_objects;
12541 Label return_unequal;
12542 // At most one is a smi, so we can test for smi by adding the two.
12543 // A smi plus a heap object has the low bit set, a heap object plus
12544 // a heap object has the low bit clear.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012545 STATIC_ASSERT(kSmiTag == 0);
12546 STATIC_ASSERT(kSmiTagMask == 1);
Steve Block8defd9f2010-07-08 12:39:36 +010012547 __ lea(ecx, Operand(eax, edx, times_1, 0));
12548 __ test(ecx, Immediate(kSmiTagMask));
12549 __ j(not_zero, &not_both_objects);
12550 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
12551 __ j(below, &not_both_objects);
12552 __ CmpObjectType(edx, FIRST_JS_OBJECT_TYPE, ebx);
12553 __ j(below, &not_both_objects);
12554 // We do not bail out after this point. Both are JSObjects, and
12555 // they are equal if and only if both are undetectable.
12556 // The and of the undetectable flags is 1 if and only if they are equal.
12557 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
12558 1 << Map::kIsUndetectable);
12559 __ j(zero, &return_unequal);
12560 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
12561 1 << Map::kIsUndetectable);
12562 __ j(zero, &return_unequal);
12563 // The objects are both undetectable, so they both compare as the value
12564 // undefined, and are equal.
12565 __ Set(eax, Immediate(EQUAL));
12566 __ bind(&return_unequal);
12567 // Return non-equal by returning the non-zero object pointer in eax,
12568 // or return equal if we fell through to here.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012569 __ ret(0); // eax holds the result.
Steve Block8defd9f2010-07-08 12:39:36 +010012570 __ bind(&not_both_objects);
12571 }
12572
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012573 // Push arguments below the return address.
Steve Blocka7e24c12009-10-30 11:49:00 +000012574 __ pop(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +000012575 __ push(edx);
12576 __ push(eax);
12577
12578 // Figure out which native to call and setup the arguments.
12579 Builtins::JavaScript builtin;
12580 if (cc_ == equal) {
12581 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
12582 } else {
12583 builtin = Builtins::COMPARE;
Steve Block6ded16b2010-05-10 14:33:55 +010012584 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
Steve Blocka7e24c12009-10-30 11:49:00 +000012585 }
12586
12587 // Restore return address on the stack.
12588 __ push(ecx);
12589
12590 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
12591 // tagged as a small integer.
12592 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
12593}
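
// A minimal sketch (not part of the stub): the NaN test performed above on
// the high word of a heap number. Shifting left by one drops the sign bit;
// the value is then unsigned-greater-or-equal to kQuietNaNHighBitsMask << 1
// exactly when all exponent bits and the top mantissa bit are set, i.e. for
// a quiet NaN. Hypothetical helper, for exposition only.
static inline bool HighWordIsQuietNaN(uint32_t high_word) {
  return (high_word << 1) >=
         (static_cast<uint32_t>(kQuietNaNHighBitsMask) << 1);
}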
12594
12595
12596void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
12597 Label* label,
12598 Register object,
12599 Register scratch) {
12600 __ test(object, Immediate(kSmiTagMask));
12601 __ j(zero, label);
12602 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
12603 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
12604 __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
12605 __ cmp(scratch, kSymbolTag | kStringTag);
12606 __ j(not_equal, label);
12607}
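
// A minimal sketch (not part of the stub): the symbol test above in C++
// terms, using the same instance-type bit masks. The helper name is
// hypothetical, for exposition only.
static inline bool InstanceTypeIsSymbol(uint32_t instance_type) {
  // Symbols are strings (kStringTag) with the symbol bit set (kSymbolTag).
  return (instance_type & (kIsSymbolMask | kIsNotStringMask)) ==
         (kSymbolTag | kStringTag);
}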
12608
12609
12610void StackCheckStub::Generate(MacroAssembler* masm) {
12611 // Because builtins always remove the receiver from the stack, we
12612 // have to fake one to avoid underflowing the stack. The receiver
12613 // must be inserted below the return address on the stack so we
12614 // temporarily store that in a register.
12615 __ pop(eax);
12616 __ push(Immediate(Smi::FromInt(0)));
12617 __ push(eax);
12618
12619 // Do tail-call to runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +010012620 __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +000012621}
12622
12623
12624void CallFunctionStub::Generate(MacroAssembler* masm) {
12625 Label slow;
12626
Leon Clarkee46be812010-01-19 14:06:41 +000012627 // If the receiver might be a value (string, number or boolean), check for
12628 // this and box it if it is.
12629 if (ReceiverMightBeValue()) {
12630 // Get the receiver from the stack.
12631 // +1 ~ return address
12632 Label receiver_is_value, receiver_is_js_object;
12633 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
12634
12635 // Check if receiver is a smi (which is a number value).
12636 __ test(eax, Immediate(kSmiTagMask));
12637 __ j(zero, &receiver_is_value, not_taken);
12638
12639 // Check if the receiver is a valid JS object.
12640 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
12641 __ j(above_equal, &receiver_is_js_object);
12642
12643 // Call the runtime to box the value.
12644 __ bind(&receiver_is_value);
12645 __ EnterInternalFrame();
12646 __ push(eax);
12647 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
12648 __ LeaveInternalFrame();
12649 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
12650
12651 __ bind(&receiver_is_js_object);
12652 }
12653
Steve Blocka7e24c12009-10-30 11:49:00 +000012654 // Get the function to call from the stack.
12655 // +2 ~ receiver, return address
12656 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
12657
12658 // Check that the function really is a JavaScript function.
12659 __ test(edi, Immediate(kSmiTagMask));
12660 __ j(zero, &slow, not_taken);
12661 // Go to the slow case if we do not have a function.
12662 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
12663 __ j(not_equal, &slow, not_taken);
12664
12665 // Fast-case: Just invoke the function.
12666 ParameterCount actual(argc_);
12667 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
12668
12669 // Slow-case: Non-function called.
12670 __ bind(&slow);
Andrei Popescu402d9372010-02-26 13:31:12 +000012671 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
12672 // of the original receiver from the call site).
12673 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
Steve Blocka7e24c12009-10-30 11:49:00 +000012674 __ Set(eax, Immediate(argc_));
12675 __ Set(ebx, Immediate(0));
12676 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
12677 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
12678 __ jmp(adaptor, RelocInfo::CODE_TARGET);
12679}
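
// Example of the case handled above (illustrative only): calling a function
// with a primitive receiver, e.g.
//   var charAt = String.prototype.charAt;
//   charAt.call('abc', 1);
// Here the receiver 'abc' is a string value, so the stub invokes the
// TO_OBJECT builtin to box it into a wrapper object before the call.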
12680
12681
Steve Blocka7e24c12009-10-30 11:49:00 +000012682void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
12683 // eax holds the exception.
12684
12685 // Adjust this code if not the case.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012686 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000012687
12688 // Drop the sp to the top of the handler.
12689 ExternalReference handler_address(Top::k_handler_address);
12690 __ mov(esp, Operand::StaticVariable(handler_address));
12691
12692 // Restore next handler and frame pointer, discard handler state.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012693 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012694 __ pop(Operand::StaticVariable(handler_address));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012695 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000012696 __ pop(ebp);
12697 __ pop(edx); // Remove state.
12698
12699 // Before returning we restore the context from the frame pointer if
12700 // not NULL. The frame pointer is NULL in the exception handler of
12701 // a JS entry frame.
12702 __ xor_(esi, Operand(esi)); // Tentatively set context pointer to NULL.
12703 Label skip;
12704 __ cmp(ebp, 0);
12705 __ j(equal, &skip, not_taken);
12706 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
12707 __ bind(&skip);
12708
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012709 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000012710 __ ret(0);
12711}
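
// A minimal sketch (not part of the stub): the stack handler layout that the
// STATIC_ASSERTs above rely on, written as a struct. The struct and field
// names are hypothetical, for exposition only; all fields are pointer-sized.
struct StackHandlerLayoutSketch {
  void* next;   // StackHandlerConstants::kNextOffset == 0 * kPointerSize
  void* fp;     // StackHandlerConstants::kFPOffset   == 1 * kPointerSize
  void* state;  // popped into edx above               (2 * kPointerSize)
  void* pc;     // StackHandlerConstants::kPCOffset   == 3 * kPointerSize
};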
12712
12713
Steve Blockd0582a62009-12-15 09:54:21 +000012714// If true, a Handle<T> passed by value is passed and returned by
12715// using the location_ field directly. If false, it is passed and
12716// returned as a pointer to a handle.
Steve Block6ded16b2010-05-10 14:33:55 +010012717#ifdef USING_BSD_ABI
Steve Blockd0582a62009-12-15 09:54:21 +000012718static const bool kPassHandlesDirectly = true;
12719#else
12720static const bool kPassHandlesDirectly = false;
12721#endif
12722
12723
12724void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
Ben Murdochbb769b22010-08-11 14:56:33 +010012725 Label empty_handle;
Steve Blockd0582a62009-12-15 09:54:21 +000012726 Label prologue;
12727 Label promote_scheduled_exception;
12728 __ EnterApiExitFrame(ExitFrame::MODE_NORMAL, kStackSpace, kArgc);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012729 STATIC_ASSERT(kArgc == 4);
Steve Blockd0582a62009-12-15 09:54:21 +000012730 if (kPassHandlesDirectly) {
12731 // When handles as passed directly we don't have to allocate extra
12732 // space for and pass an out parameter.
12733 __ mov(Operand(esp, 0 * kPointerSize), ebx); // name.
12734 __ mov(Operand(esp, 1 * kPointerSize), eax); // arguments pointer.
12735 } else {
12736 // The function expects three arguments to be passed but we allocate
12737 // four to get space for the output cell. The argument slots are filled
12738 // as follows:
12739 //
12740 // 3: output cell
12741 // 2: arguments pointer
12742 // 1: name
12743 // 0: pointer to the output cell
12744 //
12745 // Note that this is one more "argument" than the function expects
12746 // so the out cell will have to be popped explicitly after returning
12747 // from the function.
12748 __ mov(Operand(esp, 1 * kPointerSize), ebx); // name.
12749 __ mov(Operand(esp, 2 * kPointerSize), eax); // arguments pointer.
12750 __ mov(ebx, esp);
12751 __ add(Operand(ebx), Immediate(3 * kPointerSize));
12752 __ mov(Operand(esp, 0 * kPointerSize), ebx); // output
12753 __ mov(Operand(esp, 3 * kPointerSize), Immediate(0)); // out cell.
12754 }
12755 // Call the api function!
12756 __ call(fun()->address(), RelocInfo::RUNTIME_ENTRY);
12757 // Check if the function scheduled an exception.
12758 ExternalReference scheduled_exception_address =
12759 ExternalReference::scheduled_exception_address();
12760 __ cmp(Operand::StaticVariable(scheduled_exception_address),
12761 Immediate(Factory::the_hole_value()));
12762 __ j(not_equal, &promote_scheduled_exception, not_taken);
12763 if (!kPassHandlesDirectly) {
12764 // The returned value is a pointer to the handle holding the result.
12765 // Dereference this to get to the location.
12766 __ mov(eax, Operand(eax, 0));
12767 }
Ben Murdochbb769b22010-08-11 14:56:33 +010012768 // Check if the result handle holds 0.
Steve Blockd0582a62009-12-15 09:54:21 +000012769 __ test(eax, Operand(eax));
Ben Murdochbb769b22010-08-11 14:56:33 +010012770 __ j(zero, &empty_handle, not_taken);
Steve Blockd0582a62009-12-15 09:54:21 +000012771 // It was non-zero. Dereference to get the result value.
Steve Blockd0582a62009-12-15 09:54:21 +000012772 __ mov(eax, Operand(eax, 0));
12773 __ bind(&prologue);
12774 __ LeaveExitFrame(ExitFrame::MODE_NORMAL);
12775 __ ret(0);
12776 __ bind(&promote_scheduled_exception);
Steve Block6ded16b2010-05-10 14:33:55 +010012777 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
Ben Murdochbb769b22010-08-11 14:56:33 +010012778 __ bind(&empty_handle);
12779 // It was zero; the result is undefined.
12780 __ mov(eax, Factory::undefined_value());
12781 __ jmp(&prologue);
Steve Blockd0582a62009-12-15 09:54:21 +000012782}
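
// A minimal sketch (not part of the stub): the unwrapping performed above
// when kPassHandlesDirectly is false. The callback returns a pointer to a
// handle; one dereference yields the handle's location, a zero location
// denotes an empty handle (result undefined), and a second dereference
// loads the object. Hypothetical helper and signature, for exposition only.
static inline Object* UnwrapApiGetterResult(Object*** returned) {
  Object** location = *returned;  // Pointer-to-handle -> location.
  if (location == NULL) return Heap::undefined_value();  // Empty handle.
  return *location;  // Load the result object.
}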
12783
12784
Steve Blocka7e24c12009-10-30 11:49:00 +000012785void CEntryStub::GenerateCore(MacroAssembler* masm,
12786 Label* throw_normal_exception,
12787 Label* throw_termination_exception,
12788 Label* throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000012789 bool do_gc,
Steve Block6ded16b2010-05-10 14:33:55 +010012790 bool always_allocate_scope,
12791 int /* alignment_skew */) {
Steve Blocka7e24c12009-10-30 11:49:00 +000012792 // eax: result parameter for PerformGC, if any
12793 // ebx: pointer to C function (C callee-saved)
12794 // ebp: frame pointer (restored after C call)
12795 // esp: stack pointer (restored after C call)
12796 // edi: number of arguments including receiver (C callee-saved)
12797 // esi: pointer to the first argument (C callee-saved)
12798
Leon Clarke4515c472010-02-03 11:58:03 +000012799 // Result returned in eax, or edx:eax if result_size_ is 2.
12800
Steve Block6ded16b2010-05-10 14:33:55 +010012801 // Check stack alignment.
12802 if (FLAG_debug_code) {
12803 __ CheckStackAlignment();
12804 }
12805
Steve Blocka7e24c12009-10-30 11:49:00 +000012806 if (do_gc) {
Steve Block6ded16b2010-05-10 14:33:55 +010012807 // Pass failure code returned from last attempt as first argument to
12808 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
12809 // stack alignment is known to be correct. This function takes one argument
12810 // which is passed on the stack, and we know that the stack has been
12811 // prepared to pass at least one argument.
Steve Blocka7e24c12009-10-30 11:49:00 +000012812 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
12813 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
12814 }
12815
12816 ExternalReference scope_depth =
12817 ExternalReference::heap_always_allocate_scope_depth();
12818 if (always_allocate_scope) {
12819 __ inc(Operand::StaticVariable(scope_depth));
12820 }
12821
12822 // Call C function.
12823 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
12824 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
12825 __ call(Operand(ebx));
12826 // Result is in eax or edx:eax - do not destroy these registers!
12827
12828 if (always_allocate_scope) {
12829 __ dec(Operand::StaticVariable(scope_depth));
12830 }
12831
12832 // Make sure we're not trying to return 'the hole' from the runtime
12833 // call as this may lead to crashes in the IC code later.
12834 if (FLAG_debug_code) {
12835 Label okay;
12836 __ cmp(eax, Factory::the_hole_value());
12837 __ j(not_equal, &okay);
12838 __ int3();
12839 __ bind(&okay);
12840 }
12841
12842 // Check for failure result.
12843 Label failure_returned;
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012844 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012845 __ lea(ecx, Operand(eax, 1));
12846 // Lower 2 bits of ecx are 0 iff eax has failure tag.
12847 __ test(ecx, Immediate(kFailureTagMask));
12848 __ j(zero, &failure_returned, not_taken);
12849
12850 // Exit the JavaScript to C++ exit frame.
Leon Clarke4515c472010-02-03 11:58:03 +000012851 __ LeaveExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +000012852 __ ret(0);
12853
12854 // Handling of failure.
12855 __ bind(&failure_returned);
12856
12857 Label retry;
12858 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012859 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012860 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
12861 __ j(zero, &retry, taken);
12862
12863 // Special handling of out of memory exceptions.
12864 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
12865 __ j(equal, throw_out_of_memory_exception);
12866
12867 // Retrieve the pending exception and clear the variable.
12868 ExternalReference pending_exception_address(Top::k_pending_exception_address);
12869 __ mov(eax, Operand::StaticVariable(pending_exception_address));
12870 __ mov(edx,
12871 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
12872 __ mov(Operand::StaticVariable(pending_exception_address), edx);
12873
12874 // Special handling of termination exceptions, which are uncatchable
12875 // by JavaScript code.
12876 __ cmp(eax, Factory::termination_exception());
12877 __ j(equal, throw_termination_exception);
12878
12879 // Handle normal exception.
12880 __ jmp(throw_normal_exception);
12881
12882 // Retry.
12883 __ bind(&retry);
12884}
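
// A minimal sketch (not part of the stub): the failure check implemented
// above with "lea ecx, [eax + 1]". Failure objects have the low tag bits
// set, so adding one clears the bits covered by kFailureTagMask exactly
// when the value is a failure. Hypothetical helper, for exposition only
// (ia32, 32-bit words).
static inline bool IsFailureResult(uint32_t value) {
  return ((value + 1) & kFailureTagMask) == 0;
}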
12885
12886
12887void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
12888 UncatchableExceptionType type) {
12889 // Adjust this code if not the case.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012890 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000012891
12892 // Drop sp to the top stack handler.
12893 ExternalReference handler_address(Top::k_handler_address);
12894 __ mov(esp, Operand::StaticVariable(handler_address));
12895
12896 // Unwind the handlers until the ENTRY handler is found.
12897 Label loop, done;
12898 __ bind(&loop);
12899 // Load the type of the current stack handler.
12900 const int kStateOffset = StackHandlerConstants::kStateOffset;
12901 __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
12902 __ j(equal, &done);
12903 // Fetch the next handler in the list.
12904 const int kNextOffset = StackHandlerConstants::kNextOffset;
12905 __ mov(esp, Operand(esp, kNextOffset));
12906 __ jmp(&loop);
12907 __ bind(&done);
12908
12909 // Set the top handler address to next handler past the current ENTRY handler.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012910 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +000012911 __ pop(Operand::StaticVariable(handler_address));
12912
12913 if (type == OUT_OF_MEMORY) {
12914 // Set external caught exception to false.
12915 ExternalReference external_caught(Top::k_external_caught_exception_address);
12916 __ mov(eax, false);
12917 __ mov(Operand::StaticVariable(external_caught), eax);
12918
12919 // Set pending exception and eax to out of memory exception.
12920 ExternalReference pending_exception(Top::k_pending_exception_address);
12921 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
12922 __ mov(Operand::StaticVariable(pending_exception), eax);
12923 }
12924
12925 // Clear the context pointer.
12926 __ xor_(esi, Operand(esi));
12927
12928 // Restore fp from handler and discard handler state.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012929 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000012930 __ pop(ebp);
12931 __ pop(edx); // State.
12932
Kristian Monsen50ef84f2010-07-29 15:18:00 +010012933 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000012934 __ ret(0);
12935}
12936
12937
Leon Clarke4515c472010-02-03 11:58:03 +000012938void CEntryStub::Generate(MacroAssembler* masm) {
Steve Blocka7e24c12009-10-30 11:49:00 +000012939 // eax: number of arguments including receiver
12940 // ebx: pointer to C function (C callee-saved)
12941 // ebp: frame pointer (restored after C call)
12942 // esp: stack pointer (restored after C call)
12943 // esi: current context (C callee-saved)
12944 // edi: JS function of the caller (C callee-saved)
12945
12946 // NOTE: Invocations of builtins may return failure objects instead
12947 // of a proper result. The builtin entry handles this by performing
12948 // a garbage collection and retrying the builtin (twice).
12949
Steve Blocka7e24c12009-10-30 11:49:00 +000012950 // Enter the exit frame that transitions from JavaScript to C++.
Leon Clarke4515c472010-02-03 11:58:03 +000012951 __ EnterExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +000012952
12953 // eax: result parameter for PerformGC, if any (setup below)
12954 // ebx: pointer to builtin function (C callee-saved)
12955 // ebp: frame pointer (restored after C call)
12956 // esp: stack pointer (restored after C call)
12957 // edi: number of arguments including receiver (C callee-saved)
12958 // esi: argv pointer (C callee-saved)
12959
12960 Label throw_normal_exception;
12961 Label throw_termination_exception;
12962 Label throw_out_of_memory_exception;
12963
12964 // Call into the runtime system.
12965 GenerateCore(masm,
12966 &throw_normal_exception,
12967 &throw_termination_exception,
12968 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000012969 false,
12970 false);
12971
12972 // Do space-specific GC and retry runtime call.
12973 GenerateCore(masm,
12974 &throw_normal_exception,
12975 &throw_termination_exception,
12976 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000012977 true,
12978 false);
12979
12980 // Do full GC and retry runtime call one final time.
12981 Failure* failure = Failure::InternalError();
12982 __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
12983 GenerateCore(masm,
12984 &throw_normal_exception,
12985 &throw_termination_exception,
12986 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000012987 true,
12988 true);
12989
12990 __ bind(&throw_out_of_memory_exception);
12991 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
12992
12993 __ bind(&throw_termination_exception);
12994 GenerateThrowUncatchable(masm, TERMINATION);
12995
12996 __ bind(&throw_normal_exception);
12997 GenerateThrowTOS(masm);
12998}
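
// A minimal sketch (not part of the stub): the retry policy wired up by the
// three GenerateCore calls above. Every helper below is hypothetical
// pseudocode, for exposition only.
//   Object* result = RunBuiltin();                  // 1st try: no GC.
//   if (IsRetryFailure(result)) {
//     CollectGarbageInFailedSpace(result);          // Space-specific GC.
//     result = RunBuiltin();                        // 2nd try.
//   }
//   if (IsRetryFailure(result)) {
//     CollectAllGarbage();                          // Full GC, then retry
//     result = RunBuiltinWithAlwaysAllocate();      // one final time.
//   }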
12999
13000
13001void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
13002 Label invoke, exit;
13003#ifdef ENABLE_LOGGING_AND_PROFILING
13004 Label not_outermost_js, not_outermost_js_2;
13005#endif
13006
13007 // Setup frame.
13008 __ push(ebp);
13009 __ mov(ebp, Operand(esp));
13010
13011 // Push marker in two places.
13012 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
13013 __ push(Immediate(Smi::FromInt(marker))); // context slot
13014 __ push(Immediate(Smi::FromInt(marker))); // function slot
13015 // Save callee-saved registers (C calling conventions).
13016 __ push(edi);
13017 __ push(esi);
13018 __ push(ebx);
13019
13020 // Save copies of the top frame descriptor on the stack.
13021 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
13022 __ push(Operand::StaticVariable(c_entry_fp));
13023
13024#ifdef ENABLE_LOGGING_AND_PROFILING
13025 // If this is the outermost JS call, set js_entry_sp value.
13026 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
13027 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
13028 __ j(not_equal, &not_outermost_js);
13029 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
13030 __ bind(&not_outermost_js);
13031#endif
13032
13033 // Call a faked try-block that does the invoke.
13034 __ call(&invoke);
13035
13036 // Caught exception: Store result (exception) in the pending
13037 // exception field in the JSEnv and return a failure sentinel.
13038 ExternalReference pending_exception(Top::k_pending_exception_address);
13039 __ mov(Operand::StaticVariable(pending_exception), eax);
13040 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
13041 __ jmp(&exit);
13042
13043 // Invoke: Link this frame into the handler chain.
13044 __ bind(&invoke);
13045 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
13046
13047 // Clear any pending exceptions.
13048 __ mov(edx,
13049 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
13050 __ mov(Operand::StaticVariable(pending_exception), edx);
13051
13052 // Fake a receiver (NULL).
13053 __ push(Immediate(0)); // receiver
13054
13055 // Invoke the function by calling through JS entry trampoline
13056 // builtin and pop the faked function when we return. Notice that we
13057 // cannot store a reference to the trampoline code directly in this
13058 // stub, because the builtin stubs may not have been generated yet.
13059 if (is_construct) {
13060 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
13061 __ mov(edx, Immediate(construct_entry));
13062 } else {
13063 ExternalReference entry(Builtins::JSEntryTrampoline);
13064 __ mov(edx, Immediate(entry));
13065 }
13066 __ mov(edx, Operand(edx, 0)); // deref address
13067 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
13068 __ call(Operand(edx));
13069
13070 // Unlink this frame from the handler chain.
13071 __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
13072 // Pop next_sp.
13073 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
13074
13075#ifdef ENABLE_LOGGING_AND_PROFILING
13076 // If current EBP value is the same as js_entry_sp value, it means that
13077 // the current function is the outermost.
13078 __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
13079 __ j(not_equal, &not_outermost_js_2);
13080 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
13081 __ bind(&not_outermost_js_2);
13082#endif
13083
13084 // Restore the top frame descriptor from the stack.
13085 __ bind(&exit);
13086 __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));
13087
13088 // Restore callee-saved registers (C calling conventions).
13089 __ pop(ebx);
13090 __ pop(esi);
13091 __ pop(edi);
13092 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
13093
13094 // Restore frame pointer and return.
13095 __ pop(ebp);
13096 __ ret(0);
13097}
13098
13099
13100void InstanceofStub::Generate(MacroAssembler* masm) {
13101 // Get the object - go slow case if it's a smi.
13102 Label slow;
13103 __ mov(eax, Operand(esp, 2 * kPointerSize)); // 2 ~ return address, function
13104 __ test(eax, Immediate(kSmiTagMask));
13105 __ j(zero, &slow, not_taken);
13106
13107 // Check that the left hand is a JS object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013108 __ IsObjectJSObjectType(eax, eax, edx, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +000013109
13110 // Get the prototype of the function.
13111 __ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address
Kristian Monsen25f61362010-05-21 11:50:48 +010013112 // edx is function, eax is map.
13113
13114 // Look up the function and the map in the instanceof cache.
13115 Label miss;
13116 ExternalReference roots_address = ExternalReference::roots_address();
13117 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
13118 __ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address));
13119 __ j(not_equal, &miss);
13120 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
13121 __ cmp(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
13122 __ j(not_equal, &miss);
13123 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
13124 __ mov(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
13125 __ ret(2 * kPointerSize);
13126
13127 __ bind(&miss);
Steve Blocka7e24c12009-10-30 11:49:00 +000013128 __ TryGetFunctionPrototype(edx, ebx, ecx, &slow);
13129
13130 // Check that the function prototype is a JS object.
13131 __ test(ebx, Immediate(kSmiTagMask));
13132 __ j(zero, &slow, not_taken);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013133 __ IsObjectJSObjectType(ebx, ecx, ecx, &slow);
Steve Blocka7e24c12009-10-30 11:49:00 +000013134
Kristian Monsen25f61362010-05-21 11:50:48 +010013135 // Register mapping:
13136 // eax is object map.
13137 // edx is function.
13138 // ebx is function prototype.
13139 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
13140 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
13141 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
13142 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), edx);
13143
Steve Blocka7e24c12009-10-30 11:49:00 +000013144 __ mov(ecx, FieldOperand(eax, Map::kPrototypeOffset));
13145
13146 // Loop through the prototype chain looking for the function prototype.
13147 Label loop, is_instance, is_not_instance;
13148 __ bind(&loop);
13149 __ cmp(ecx, Operand(ebx));
13150 __ j(equal, &is_instance);
13151 __ cmp(Operand(ecx), Immediate(Factory::null_value()));
13152 __ j(equal, &is_not_instance);
13153 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
13154 __ mov(ecx, FieldOperand(ecx, Map::kPrototypeOffset));
13155 __ jmp(&loop);
13156
13157 __ bind(&is_instance);
13158 __ Set(eax, Immediate(0));
Kristian Monsen25f61362010-05-21 11:50:48 +010013159 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
13160 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
Steve Blocka7e24c12009-10-30 11:49:00 +000013161 __ ret(2 * kPointerSize);
13162
13163 __ bind(&is_not_instance);
13164 __ Set(eax, Immediate(Smi::FromInt(1)));
Kristian Monsen25f61362010-05-21 11:50:48 +010013165 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
13166 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
Steve Blocka7e24c12009-10-30 11:49:00 +000013167 __ ret(2 * kPointerSize);
13168
13169 // Slow-case: Go through the JavaScript implementation.
13170 __ bind(&slow);
13171 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
13172}
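
// A minimal sketch (not part of the stub): the prototype-chain walk above in
// C++ terms. Starting from the object's prototype, follow map->prototype
// links until the function's prototype is found (instance) or null ends the
// chain (not an instance). Hypothetical helper, for exposition only; GC and
// exotic objects are ignored.
static bool ChainContainsPrototype(Object* start_prototype,
                                   Object* function_prototype) {
  Object* current = start_prototype;
  while (current != Heap::null_value()) {
    if (current == function_prototype) return true;
    current = HeapObject::cast(current)->map()->prototype();
  }
  return false;
}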
13173
13174
Steve Block6ded16b2010-05-10 14:33:55 +010013175int CompareStub::MinorKey() {
13176 // Encode the three parameters in a unique 16 bit value. To avoid duplicate
13177 // stubs the never NaN NaN condition is only taken into account if the
13178 // condition is equals.
Ben Murdoch3bec4d22010-07-22 14:51:16 +010013179 ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
13180 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
Steve Block6ded16b2010-05-10 14:33:55 +010013181 return ConditionField::encode(static_cast<unsigned>(cc_))
Ben Murdoch3bec4d22010-07-22 14:51:16 +010013182 | RegisterField::encode(false) // lhs_ and rhs_ are not used
Steve Block6ded16b2010-05-10 14:33:55 +010013183 | StrictField::encode(strict_)
13184 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
13185 | IncludeNumberCompareField::encode(include_number_compare_);
Leon Clarkee46be812010-01-19 14:06:41 +000013186}
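
// A minimal sketch (not part of the stub): how the BitField-style encoders
// above combine into one key. The shift positions below are invented for
// exposition; the real field layout is declared with CompareStub.
static inline int ExampleCompareStubMinorKey(unsigned cc, bool strict,
                                             bool never_nan_nan,
                                             bool include_number_compare) {
  return static_cast<int>(cc)                          // condition bits
       | (0 << 12)                                     // register field: off
       | ((strict ? 1 : 0) << 13)
       | ((never_nan_nan ? 1 : 0) << 14)
       | ((include_number_compare ? 1 : 0) << 15);
}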
13187
13188
Steve Block6ded16b2010-05-10 14:33:55 +010013189// Unfortunately you have to run without snapshots to see most of these
13190// names in the profile since most compare stubs end up in the snapshot.
13191const char* CompareStub::GetName() {
Ben Murdoch3bec4d22010-07-22 14:51:16 +010013192 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
13193
Steve Block6ded16b2010-05-10 14:33:55 +010013194 if (name_ != NULL) return name_;
13195 const int kMaxNameLength = 100;
13196 name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
13197 if (name_ == NULL) return "OOM";
13198
13199 const char* cc_name;
13200 switch (cc_) {
13201 case less: cc_name = "LT"; break;
13202 case greater: cc_name = "GT"; break;
13203 case less_equal: cc_name = "LE"; break;
13204 case greater_equal: cc_name = "GE"; break;
13205 case equal: cc_name = "EQ"; break;
13206 case not_equal: cc_name = "NE"; break;
13207 default: cc_name = "UnknownCondition"; break;
13208 }
13209
13210 const char* strict_name = "";
13211 if (strict_ && (cc_ == equal || cc_ == not_equal)) {
13212 strict_name = "_STRICT";
13213 }
13214
13215 const char* never_nan_nan_name = "";
13216 if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
13217 never_nan_nan_name = "_NO_NAN";
13218 }
13219
13220 const char* include_number_compare_name = "";
13221 if (!include_number_compare_) {
13222 include_number_compare_name = "_NO_NUMBER";
13223 }
13224
13225 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
13226 "CompareStub_%s%s%s%s",
13227 cc_name,
13228 strict_name,
13229 never_nan_nan_name,
13230 include_number_compare_name);
13231 return name_;
13232}
13233
13234
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013235// -------------------------------------------------------------------------
13236// StringCharCodeAtGenerator
13237
13238void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
13239 Label flat_string;
Steve Block6ded16b2010-05-10 14:33:55 +010013240 Label ascii_string;
13241 Label got_char_code;
13242
13243 // If the receiver is a smi, trigger the non-string case.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013244 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013245 __ test(object_, Immediate(kSmiTagMask));
13246 __ j(zero, receiver_not_string_);
Steve Block6ded16b2010-05-10 14:33:55 +010013247
13248 // Fetch the instance type of the receiver into result register.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013249 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
13250 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010013251 // If the receiver is not a string, trigger the non-string case.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013252 __ test(result_, Immediate(kIsNotStringMask));
13253 __ j(not_zero, receiver_not_string_);
Steve Block6ded16b2010-05-10 14:33:55 +010013254
13255 // If the index is a non-smi, trigger the non-smi case.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013256 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013257 __ test(index_, Immediate(kSmiTagMask));
13258 __ j(not_zero, &index_not_smi_);
13259
13260 // Put smi-tagged index into scratch register.
13261 __ mov(scratch_, index_);
13262 __ bind(&got_smi_index_);
Steve Block6ded16b2010-05-10 14:33:55 +010013263
13264 // Check for index out of range.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013265 __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
13266 __ j(above_equal, index_out_of_range_);
Steve Block6ded16b2010-05-10 14:33:55 +010013267
13268 // We need special handling for non-flat strings.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013269 STATIC_ASSERT(kSeqStringTag == 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013270 __ test(result_, Immediate(kStringRepresentationMask));
13271 __ j(zero, &flat_string);
Steve Block6ded16b2010-05-10 14:33:55 +010013272
13273 // Handle non-flat strings.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013274 __ test(result_, Immediate(kIsConsStringMask));
13275 __ j(zero, &call_runtime_);
Steve Block6ded16b2010-05-10 14:33:55 +010013276
13277 // ConsString.
13278 // Check whether the right hand side is the empty string (i.e. if
13279 // this is really a flat string in a cons string). If that is not
13280 // the case we would rather go to the runtime system now to flatten
13281 // the string.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013282 __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
13283 Immediate(Factory::empty_string()));
13284 __ j(not_equal, &call_runtime_);
Steve Block6ded16b2010-05-10 14:33:55 +010013285 // Get the first of the two strings and load its instance type.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013286 __ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
13287 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
13288 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
13289 // If the first cons component is also non-flat, then go to runtime.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013290 STATIC_ASSERT(kSeqStringTag == 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013291 __ test(result_, Immediate(kStringRepresentationMask));
13292 __ j(not_zero, &call_runtime_);
13293
13294 // Check for 1-byte or 2-byte string.
13295 __ bind(&flat_string);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013296 STATIC_ASSERT(kAsciiStringTag != 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013297 __ test(result_, Immediate(kStringEncodingMask));
13298 __ j(not_zero, &ascii_string);
13299
13300 // 2-byte string.
13301 // Load the 2-byte character code into the result register.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013302 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013303 __ movzx_w(result_, FieldOperand(object_,
13304 scratch_, times_1, // Scratch is smi-tagged.
13305 SeqTwoByteString::kHeaderSize));
13306 __ jmp(&got_char_code);
Steve Block6ded16b2010-05-10 14:33:55 +010013307
13308 // ASCII string.
Steve Block6ded16b2010-05-10 14:33:55 +010013309 // Load the byte into the result register.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013310 __ bind(&ascii_string);
13311 __ SmiUntag(scratch_);
13312 __ movzx_b(result_, FieldOperand(object_,
13313 scratch_, times_1,
13314 SeqAsciiString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010013315 __ bind(&got_char_code);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013316 __ SmiTag(result_);
13317 __ bind(&exit_);
Steve Block6ded16b2010-05-10 14:33:55 +010013318}
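
// A note on the indexing above (illustrative): for the two-byte case the
// smi-tagged index can be used directly with a times_1 scale, because a smi
// stores index << 1 and
//   byte_offset = index * sizeof(uc16) = index * 2 = smi(index).
// For the ASCII case each character is one byte, so the index is untagged
// first.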
13319
13320
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013321void StringCharCodeAtGenerator::GenerateSlow(
13322 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
13323 __ Abort("Unexpected fallthrough to CharCodeAt slow case");
Steve Block6ded16b2010-05-10 14:33:55 +010013324
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013325 // Index is not a smi.
13326 __ bind(&index_not_smi_);
13327 // If index is a heap number, try converting it to an integer.
13328 __ CheckMap(index_, Factory::heap_number_map(), index_not_number_, true);
13329 call_helper.BeforeCall(masm);
13330 __ push(object_);
13331 __ push(index_);
13332 __ push(index_); // Consumed by runtime conversion function.
13333 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
13334 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
13335 } else {
13336 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
13337 // NumberToSmi discards numbers that are not exact integers.
13338 __ CallRuntime(Runtime::kNumberToSmi, 1);
13339 }
13340 if (!scratch_.is(eax)) {
13341 // Save the conversion result before the pop instructions below
13342 // have a chance to overwrite it.
13343 __ mov(scratch_, eax);
13344 }
13345 __ pop(index_);
13346 __ pop(object_);
13347 // Reload the instance type.
13348 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
13349 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
13350 call_helper.AfterCall(masm);
13351 // If index is still not a smi, it must be out of range.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013352 STATIC_ASSERT(kSmiTag == 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013353 __ test(scratch_, Immediate(kSmiTagMask));
13354 __ j(not_zero, index_out_of_range_);
13355 // Otherwise, return to the fast path.
13356 __ jmp(&got_smi_index_);
Steve Block6ded16b2010-05-10 14:33:55 +010013357
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013358 // Call runtime. We get here when the receiver is a string and the
13359 // index is a number, but the code for getting the actual character
13360 // is too complex (e.g., when the string needs to be flattened).
13361 __ bind(&call_runtime_);
13362 call_helper.BeforeCall(masm);
13363 __ push(object_);
13364 __ push(index_);
13365 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
13366 if (!result_.is(eax)) {
13367 __ mov(result_, eax);
13368 }
13369 call_helper.AfterCall(masm);
13370 __ jmp(&exit_);
13371
13372 __ Abort("Unexpected fallthrough from CharCodeAt slow case");
13373}
13374
13375
13376// -------------------------------------------------------------------------
13377// StringCharFromCodeGenerator
13378
13379void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +010013380 // Fast case of Heap::LookupSingleCharacterStringFromCode.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013381 STATIC_ASSERT(kSmiTag == 0);
13382 STATIC_ASSERT(kSmiShiftSize == 0);
Steve Block6ded16b2010-05-10 14:33:55 +010013383 ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013384 __ test(code_,
Steve Block6ded16b2010-05-10 14:33:55 +010013385 Immediate(kSmiTagMask |
13386 ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013387 __ j(not_zero, &slow_case_, not_taken);
Steve Block6ded16b2010-05-10 14:33:55 +010013388
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013389 __ Set(result_, Immediate(Factory::single_character_string_cache()));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013390 STATIC_ASSERT(kSmiTag == 0);
13391 STATIC_ASSERT(kSmiTagSize == 1);
13392 STATIC_ASSERT(kSmiShiftSize == 0);
Steve Block6ded16b2010-05-10 14:33:55 +010013393 // At this point the code register contains a smi-tagged ascii char code.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013394 __ mov(result_, FieldOperand(result_,
13395 code_, times_half_pointer_size,
13396 FixedArray::kHeaderSize));
13397 __ cmp(result_, Factory::undefined_value());
13398 __ j(equal, &slow_case_, not_taken);
13399 __ bind(&exit_);
13400}
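
// The single test instruction above folds two checks into one: "code_ is a
// smi" and "the untagged char code fits the single-character cache". A
// minimal C++ sketch of the same mask arithmetic, assuming the ia32 smi
// encoding asserted above and String::kMaxAsciiCharCode == 0x7f:
inline bool FitsSingleCharCacheSketch(unsigned tagged_code) {
  const unsigned kSmiTagMaskSketch = 1;   // low bit set => not a smi
  const unsigned kMaxAsciiSketch = 0x7f;  // IsPowerOf2(0x7f + 1) holds
  // Any set bit under the mask means "not a smi" or "char code too large".
  const unsigned mask = kSmiTagMaskSketch | (~kMaxAsciiSketch << 1);
  return (tagged_code & mask) == 0;
}
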
Steve Block6ded16b2010-05-10 14:33:55 +010013401
Steve Block6ded16b2010-05-10 14:33:55 +010013402
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013403void StringCharFromCodeGenerator::GenerateSlow(
13404 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
13405 __ Abort("Unexpected fallthrough to CharFromCode slow case");
13406
13407 __ bind(&slow_case_);
13408 call_helper.BeforeCall(masm);
13409 __ push(code_);
13410 __ CallRuntime(Runtime::kCharFromCode, 1);
13411 if (!result_.is(eax)) {
13412 __ mov(result_, eax);
Steve Block6ded16b2010-05-10 14:33:55 +010013413 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013414 call_helper.AfterCall(masm);
13415 __ jmp(&exit_);
13416
13417 __ Abort("Unexpected fallthrough from CharFromCode slow case");
13418}
13419
13420
13421// -------------------------------------------------------------------------
13422// StringCharAtGenerator
13423
13424void StringCharAtGenerator::GenerateFast(MacroAssembler* masm) {
13425 char_code_at_generator_.GenerateFast(masm);
13426 char_from_code_generator_.GenerateFast(masm);
13427}
13428
13429
13430void StringCharAtGenerator::GenerateSlow(
13431 MacroAssembler* masm, const RuntimeCallHelper& call_helper) {
13432 char_code_at_generator_.GenerateSlow(masm, call_helper);
13433 char_from_code_generator_.GenerateSlow(masm, call_helper);
Steve Blocka7e24c12009-10-30 11:49:00 +000013434}
13435
Steve Blockd0582a62009-12-15 09:54:21 +000013436
13437void StringAddStub::Generate(MacroAssembler* masm) {
13438 Label string_add_runtime;
13439
13440 // Load the two arguments.
13441 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
13442 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
13443
13444 // Make sure that both arguments are strings if not known in advance.
13445 if (string_check_) {
13446 __ test(eax, Immediate(kSmiTagMask));
13447 __ j(zero, &string_add_runtime);
13448 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
13449 __ j(above_equal, &string_add_runtime);
13450
13451 // First argument is a string; test the second.
13452 __ test(edx, Immediate(kSmiTagMask));
13453 __ j(zero, &string_add_runtime);
13454 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
13455 __ j(above_equal, &string_add_runtime);
13456 }
13457
13458 // Both arguments are strings.
13459 // eax: first string
13460 // edx: second string
13461 // Check if either of the strings is empty. In that case return the other.
13462 Label second_not_zero_length, both_not_zero_length;
13463 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013464 STATIC_ASSERT(kSmiTag == 0);
Steve Blockd0582a62009-12-15 09:54:21 +000013465 __ test(ecx, Operand(ecx));
13466 __ j(not_zero, &second_not_zero_length);
13467 // Second string is empty, result is first string which is already in eax.
13468 __ IncrementCounter(&Counters::string_add_native, 1);
13469 __ ret(2 * kPointerSize);
13470 __ bind(&second_not_zero_length);
13471 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013472 STATIC_ASSERT(kSmiTag == 0);
Steve Blockd0582a62009-12-15 09:54:21 +000013473 __ test(ebx, Operand(ebx));
13474 __ j(not_zero, &both_not_zero_length);
13475 // First string is empty, result is second string which is in edx.
13476 __ mov(eax, edx);
13477 __ IncrementCounter(&Counters::string_add_native, 1);
13478 __ ret(2 * kPointerSize);
13479
13480 // Both strings are non-empty.
13481 // eax: first string
Steve Block6ded16b2010-05-10 14:33:55 +010013482 // ebx: length of first string as a smi
13483 // ecx: length of second string as a smi
Steve Blockd0582a62009-12-15 09:54:21 +000013484 // edx: second string
13485 // Look at the length of the result of adding the two strings.
Andrei Popescu402d9372010-02-26 13:31:12 +000013486 Label string_add_flat_result, longer_than_two;
Steve Blockd0582a62009-12-15 09:54:21 +000013487 __ bind(&both_not_zero_length);
13488 __ add(ebx, Operand(ecx));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013489 STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
Steve Block6ded16b2010-05-10 14:33:55 +010013490 // Handle exceptionally long strings in the runtime system.
13491 __ j(overflow, &string_add_runtime);
Steve Blockd0582a62009-12-15 09:54:21 +000013492 // When adding two one-character strings, probe the symbol table for the
13493 // combined two-character string before allocating a new one.
Steve Block6ded16b2010-05-10 14:33:55 +010013494 __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
Andrei Popescu402d9372010-02-26 13:31:12 +000013495 __ j(not_equal, &longer_than_two);
13496
13497 // Check that both strings are non-external ascii strings.
13498 __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
13499 &string_add_runtime);
13500
13501 // Get the two characters forming the new string.
13502 __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
13503 __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
13504
13505 // Try to look up the two character string in the symbol table. If it is
13506 // not found, just allocate a new one.
13507 Label make_two_character_string, make_flat_ascii_string;
Steve Block6ded16b2010-05-10 14:33:55 +010013508 StringHelper::GenerateTwoCharacterSymbolTableProbe(
13509 masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
13510 __ IncrementCounter(&Counters::string_add_native, 1);
Andrei Popescu402d9372010-02-26 13:31:12 +000013511 __ ret(2 * kPointerSize);
13512
13513 __ bind(&make_two_character_string);
Steve Block6ded16b2010-05-10 14:33:55 +010013514 __ Set(ebx, Immediate(Smi::FromInt(2)));
Andrei Popescu402d9372010-02-26 13:31:12 +000013515 __ jmp(&make_flat_ascii_string);
13516
13517 __ bind(&longer_than_two);
Steve Blockd0582a62009-12-15 09:54:21 +000013518 // Check if resulting string will be flat.
Steve Block6ded16b2010-05-10 14:33:55 +010013519 __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
Steve Blockd0582a62009-12-15 09:54:21 +000013520 __ j(below, &string_add_flat_result);
Steve Blockd0582a62009-12-15 09:54:21 +000013521
13522 // If the result is not supposed to be flat, allocate a cons string object.
13523 // If both strings are ascii, the result is an ascii cons string.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010013524 Label non_ascii, allocated, ascii_data;
Steve Blockd0582a62009-12-15 09:54:21 +000013525 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
13526 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
13527 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
13528 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
13529 __ and_(ecx, Operand(edi));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013530 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
Steve Blockd0582a62009-12-15 09:54:21 +000013531 __ test(ecx, Immediate(kAsciiStringTag));
13532 __ j(zero, &non_ascii);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010013533 __ bind(&ascii_data);
Steve Blockd0582a62009-12-15 09:54:21 +000013534 // Allocate an ascii cons string.
13535 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
13536 __ bind(&allocated);
13537 // Fill the fields of the cons string.
Steve Block6ded16b2010-05-10 14:33:55 +010013538 if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
Steve Blockd0582a62009-12-15 09:54:21 +000013539 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
13540 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
13541 Immediate(String::kEmptyHashField));
13542 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
13543 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
13544 __ mov(eax, ecx);
13545 __ IncrementCounter(&Counters::string_add_native, 1);
13546 __ ret(2 * kPointerSize);
13547 __ bind(&non_ascii);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010013548 // At least one of the strings is two-byte. Check whether it happens
13549 // to contain only ascii characters.
13550 // ecx: first instance type AND second instance type.
13551 // edi: second instance type.
13552 __ test(ecx, Immediate(kAsciiDataHintMask));
13553 __ j(not_zero, &ascii_data);
13554 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
13555 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
13556 __ xor_(edi, Operand(ecx));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013557 STATIC_ASSERT(kAsciiStringTag != 0 && kAsciiDataHintTag != 0);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010013558 __ and_(edi, kAsciiStringTag | kAsciiDataHintTag);
13559 __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
13560 __ j(equal, &ascii_data);
Steve Blockd0582a62009-12-15 09:54:21 +000013561 // Allocate a two byte cons string.
13562 __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
13563 __ jmp(&allocated);
13564
13565 // Handle creating a flat result. First check that both strings are not
13566 // external strings.
13567 // eax: first string
Steve Block6ded16b2010-05-10 14:33:55 +010013568 // ebx: length of resulting flat string as a smi
Steve Blockd0582a62009-12-15 09:54:21 +000013569 // edx: second string
13570 __ bind(&string_add_flat_result);
13571 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
13572 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
13573 __ and_(ecx, kStringRepresentationMask);
13574 __ cmp(ecx, kExternalStringTag);
13575 __ j(equal, &string_add_runtime);
13576 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
13577 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
13578 __ and_(ecx, kStringRepresentationMask);
13579 __ cmp(ecx, kExternalStringTag);
13580 __ j(equal, &string_add_runtime);
13581 // Now check if both strings are ascii strings.
13582 // eax: first string
Steve Block6ded16b2010-05-10 14:33:55 +010013583 // ebx: length of resulting flat string as a smi
Steve Blockd0582a62009-12-15 09:54:21 +000013584 // edx: second string
13585 Label non_ascii_string_add_flat_result;
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013586 STATIC_ASSERT(kStringEncodingMask == kAsciiStringTag);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013587 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
13588 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
Steve Blockd0582a62009-12-15 09:54:21 +000013589 __ j(zero, &non_ascii_string_add_flat_result);
13590 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013591 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
Steve Blockd0582a62009-12-15 09:54:21 +000013592 __ j(zero, &string_add_runtime);
Andrei Popescu402d9372010-02-26 13:31:12 +000013593
13594 __ bind(&make_flat_ascii_string);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +010013595 // Both strings are ascii strings. As they are short, they are both flat.
Steve Block6ded16b2010-05-10 14:33:55 +010013596 // ebx: length of resulting flat string as a smi
13597 __ SmiUntag(ebx);
Steve Blockd0582a62009-12-15 09:54:21 +000013598 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
13599 // eax: result string
13600 __ mov(ecx, eax);
13601 // Locate first character of result.
13602 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
13603 // Load first argument and locate first character.
13604 __ mov(edx, Operand(esp, 2 * kPointerSize));
13605 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010013606 __ SmiUntag(edi);
Steve Blockd0582a62009-12-15 09:54:21 +000013607 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
13608 // eax: result string
13609 // ecx: first character of result
13610 // edx: first char of first argument
13611 // edi: length of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010013612 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
Steve Blockd0582a62009-12-15 09:54:21 +000013613 // Load second argument and locate first character.
13614 __ mov(edx, Operand(esp, 1 * kPointerSize));
13615 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010013616 __ SmiUntag(edi);
Steve Blockd0582a62009-12-15 09:54:21 +000013617 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
13618 // eax: result string
13619 // ecx: next character of result
13620 // edx: first char of second argument
13621 // edi: length of second argument
Steve Block6ded16b2010-05-10 14:33:55 +010013622 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
Steve Blockd0582a62009-12-15 09:54:21 +000013623 __ IncrementCounter(&Counters::string_add_native, 1);
13624 __ ret(2 * kPointerSize);
13625
13626 // Handle creating a flat two byte result.
13627 // eax: first string - known to be two byte
Steve Block6ded16b2010-05-10 14:33:55 +010013628 // ebx: length of resulting flat string as a smi
Steve Blockd0582a62009-12-15 09:54:21 +000013629 // edx: second string
13630 __ bind(&non_ascii_string_add_flat_result);
13631 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010013632 __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kAsciiStringTag);
Steve Blockd0582a62009-12-15 09:54:21 +000013633 __ j(not_zero, &string_add_runtime);
13634 // Both strings are two byte strings. As they are short they are both
13635 // flat.
Steve Block6ded16b2010-05-10 14:33:55 +010013636 __ SmiUntag(ebx);
Steve Blockd0582a62009-12-15 09:54:21 +000013637 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
13638 // eax: result string
13639 __ mov(ecx, eax);
13640 // Locate first character of result.
13641 __ add(Operand(ecx),
13642 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
13643 // Load first argument and locate first character.
13644 __ mov(edx, Operand(esp, 2 * kPointerSize));
13645 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010013646 __ SmiUntag(edi);
Steve Blockd0582a62009-12-15 09:54:21 +000013647 __ add(Operand(edx),
13648 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
13649 // eax: result string
13650 // ecx: first character of result
13651 // edx: first char of first argument
13652 // edi: length of first argument
Steve Block6ded16b2010-05-10 14:33:55 +010013653 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
Steve Blockd0582a62009-12-15 09:54:21 +000013654 // Load second argument and locate first character.
13655 __ mov(edx, Operand(esp, 1 * kPointerSize));
13656 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010013657 __ SmiUntag(edi);
Steve Blockd0582a62009-12-15 09:54:21 +000013658 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
13659 // eax: result string
13660 // ecx: next character of result
13661 // edx: first char of second argument
13662 // edi: length of second argument
Steve Block6ded16b2010-05-10 14:33:55 +010013663 StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
Steve Blockd0582a62009-12-15 09:54:21 +000013664 __ IncrementCounter(&Counters::string_add_native, 1);
13665 __ ret(2 * kPointerSize);
13666
13667 // Just jump to runtime to add the two strings.
13668 __ bind(&string_add_runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010013669 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
Steve Blockd0582a62009-12-15 09:54:21 +000013670}
13671
13672
Steve Block6ded16b2010-05-10 14:33:55 +010013673void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
13674 Register dest,
13675 Register src,
13676 Register count,
13677 Register scratch,
13678 bool ascii) {
Steve Blockd0582a62009-12-15 09:54:21 +000013679 Label loop;
13680 __ bind(&loop);
13681 // This loop just copies one character at a time, as it is only used for very
13682 // short strings.
13683 if (ascii) {
13684 __ mov_b(scratch, Operand(src, 0));
13685 __ mov_b(Operand(dest, 0), scratch);
13686 __ add(Operand(src), Immediate(1));
13687 __ add(Operand(dest), Immediate(1));
13688 } else {
13689 __ mov_w(scratch, Operand(src, 0));
13690 __ mov_w(Operand(dest, 0), scratch);
13691 __ add(Operand(src), Immediate(2));
13692 __ add(Operand(dest), Immediate(2));
13693 }
13694 __ sub(Operand(count), Immediate(1));
13695 __ j(not_zero, &loop);
13696}
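
// Byte-for-byte C++ equivalent of the loop above (illustrative only); note
// that the emitted loop is a do-while and assumes count >= 1, while this
// sketch also tolerates zero:
inline void CopyCharsSketch(void* dest, const void* src, int count,
                            bool ascii) {
  if (ascii) {  // one byte per character
    const unsigned char* s = static_cast<const unsigned char*>(src);
    unsigned char* d = static_cast<unsigned char*>(dest);
    while (count-- > 0) *d++ = *s++;
  } else {      // two bytes per character
    const unsigned short* s = static_cast<const unsigned short*>(src);
    unsigned short* d = static_cast<unsigned short*>(dest);
    while (count-- > 0) *d++ = *s++;
  }
}
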
13697
13698
Steve Block6ded16b2010-05-10 14:33:55 +010013699void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
13700 Register dest,
13701 Register src,
13702 Register count,
13703 Register scratch,
13704 bool ascii) {
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013705 // Copy characters using rep movs of doublewords.
13706 // The destination is aligned on a 4 byte boundary because we are
13707 // copying to the beginning of a newly allocated string.
Leon Clarkee46be812010-01-19 14:06:41 +000013708 ASSERT(dest.is(edi)); // rep movs destination
13709 ASSERT(src.is(esi)); // rep movs source
13710 ASSERT(count.is(ecx)); // rep movs count
13711 ASSERT(!scratch.is(dest));
13712 ASSERT(!scratch.is(src));
13713 ASSERT(!scratch.is(count));
13714
13715 // Nothing to do for zero characters.
13716 Label done;
13717 __ test(count, Operand(count));
13718 __ j(zero, &done);
13719
13720 // Make count the number of bytes to copy.
13721 if (!ascii) {
13722 __ shl(count, 1);
13723 }
13724
13725 // Don't enter the rep movs if there are fewer than 4 bytes to copy.
13726 Label last_bytes;
13727 __ test(count, Immediate(~3));
13728 __ j(zero, &last_bytes);
13729
13730 // Copy from esi to edi using the rep movs instruction.
13731 __ mov(scratch, count);
13732 __ sar(count, 2); // Number of doublewords to copy.
Steve Block6ded16b2010-05-10 14:33:55 +010013733 __ cld();
Leon Clarkee46be812010-01-19 14:06:41 +000013734 __ rep_movs();
13735
13736 // Find number of bytes left.
13737 __ mov(count, scratch);
13738 __ and_(count, 3);
13739
13740 // Check if there are more bytes to copy.
13741 __ bind(&last_bytes);
13742 __ test(count, Operand(count));
13743 __ j(zero, &done);
13744
13745 // Copy remaining characters.
13746 Label loop;
13747 __ bind(&loop);
13748 __ mov_b(scratch, Operand(src, 0));
13749 __ mov_b(Operand(dest, 0), scratch);
13750 __ add(Operand(src), Immediate(1));
13751 __ add(Operand(dest), Immediate(1));
13752 __ sub(Operand(count), Immediate(1));
13753 __ j(not_zero, &loop);
13754
13755 __ bind(&done);
13756}
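
// The same strategy in C++: copy whole 32-bit doublewords first (what rep
// movs does), then finish the remaining 0-3 bytes one at a time. A sketch
// under the same assumption as above, i.e. dest is 4-byte aligned:
inline void CopyCharsRepSketch(unsigned char* dest, const unsigned char* src,
                               int count, bool ascii) {
  if (count == 0) return;                   // nothing to do
  int bytes = ascii ? count : count * 2;    // 2-byte chars double the size
  int words = bytes >> 2;                   // doubleword count for rep movs
  unsigned* wd = reinterpret_cast<unsigned*>(dest);
  const unsigned* ws = reinterpret_cast<const unsigned*>(src);
  for (int i = 0; i < words; i++) wd[i] = ws[i];
  for (int i = words * 4; i < bytes; i++) dest[i] = src[i];  // 0-3 tail bytes
}
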
13757
13758
Steve Block6ded16b2010-05-10 14:33:55 +010013759void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
13760 Register c1,
13761 Register c2,
13762 Register scratch1,
13763 Register scratch2,
13764 Register scratch3,
13765 Label* not_found) {
Andrei Popescu402d9372010-02-26 13:31:12 +000013766 // Register scratch3 is the general scratch register in this function.
13767 Register scratch = scratch3;
13768
13769 // Make sure that both characters are not digits, as such strings have a
13770 // different hash algorithm. Don't try to look for these in the symbol table.
13771 Label not_array_index;
13772 __ mov(scratch, c1);
13773 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
13774 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
13775 __ j(above, &not_array_index);
13776 __ mov(scratch, c2);
13777 __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
13778 __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
13779 __ j(below_equal, not_found);
13780
13781 __ bind(&not_array_index);
13782 // Calculate the two character string hash.
13783 Register hash = scratch1;
13784 GenerateHashInit(masm, hash, c1, scratch);
13785 GenerateHashAddCharacter(masm, hash, c2, scratch);
13786 GenerateHashGetHash(masm, hash, scratch);
13787
13788 // Collect the two characters in a register.
13789 Register chars = c1;
13790 __ shl(c2, kBitsPerByte);
13791 __ or_(chars, Operand(c2));
13792
13793 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
13794 // hash: hash of two character string.
13795
13796 // Load the symbol table.
13797 Register symbol_table = c2;
13798 ExternalReference roots_address = ExternalReference::roots_address();
13799 __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
13800 __ mov(symbol_table,
13801 Operand::StaticArray(scratch, times_pointer_size, roots_address));
13802
13803 // Calculate capacity mask from the symbol table capacity.
13804 Register mask = scratch2;
Steve Block6ded16b2010-05-10 14:33:55 +010013805 __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
Andrei Popescu402d9372010-02-26 13:31:12 +000013806 __ SmiUntag(mask);
13807 __ sub(Operand(mask), Immediate(1));
13808
13809 // Registers
13810 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
13811 // hash: hash of two character string
13812 // symbol_table: symbol table
13813 // mask: capacity mask
13814 // scratch: -
13815
13816 // Perform a number of probes in the symbol table.
13817 static const int kProbes = 4;
13818 Label found_in_symbol_table;
13819 Label next_probe[kProbes], next_probe_pop_mask[kProbes];
13820 for (int i = 0; i < kProbes; i++) {
13821 // Calculate entry in symbol table.
13822 __ mov(scratch, hash);
13823 if (i > 0) {
13824 __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
13825 }
13826 __ and_(scratch, Operand(mask));
13827
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013828 // Load the entry from the symbol table.
Andrei Popescu402d9372010-02-26 13:31:12 +000013829 Register candidate = scratch; // Scratch register contains candidate.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013830 STATIC_ASSERT(SymbolTable::kEntrySize == 1);
Andrei Popescu402d9372010-02-26 13:31:12 +000013831 __ mov(candidate,
13832 FieldOperand(symbol_table,
13833 scratch,
13834 times_pointer_size,
Steve Block6ded16b2010-05-10 14:33:55 +010013835 SymbolTable::kElementsStartOffset));
Andrei Popescu402d9372010-02-26 13:31:12 +000013836
13837 // If entry is undefined no string with this hash can be found.
13838 __ cmp(candidate, Factory::undefined_value());
13839 __ j(equal, not_found);
13840
13841 // If length is not 2 the string is not a candidate.
Steve Block6ded16b2010-05-10 14:33:55 +010013842 __ cmp(FieldOperand(candidate, String::kLengthOffset),
13843 Immediate(Smi::FromInt(2)));
Andrei Popescu402d9372010-02-26 13:31:12 +000013844 __ j(not_equal, &next_probe[i]);
13845
13846 // As we are out of registers, save the mask on the stack and use that
13847 // register as a temporary.
13848 __ push(mask);
13849 Register temp = mask;
13850
13851 // Check that the candidate is a non-external ascii string.
13852 __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
13853 __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
13854 __ JumpIfInstanceTypeIsNotSequentialAscii(
13855 temp, temp, &next_probe_pop_mask[i]);
13856
13857 // Check if the two characters match.
13858 __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
13859 __ and_(temp, 0x0000ffff);
13860 __ cmp(chars, Operand(temp));
13861 __ j(equal, &found_in_symbol_table);
13862 __ bind(&next_probe_pop_mask[i]);
13863 __ pop(mask);
13864 __ bind(&next_probe[i]);
13865 }
13866
13867 // No matching 2 character string found by probing.
13868 __ jmp(not_found);
13869
13870 // Scratch register contains result when we fall through to here.
13871 Register result = scratch;
13872 __ bind(&found_in_symbol_table);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013873 __ pop(mask); // Pop saved mask from the stack.
Andrei Popescu402d9372010-02-26 13:31:12 +000013874 if (!result.is(eax)) {
13875 __ mov(eax, result);
13876 }
13877}
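
// Shape of the probing scheme above, restated in C++. The sketch models
// each table entry as its raw two-character payload for brevity, and
// ProbeOffsetSketch is a hypothetical stand-in for
// SymbolTable::GetProbeOffset, whose exact sequence is not shown here.
inline unsigned ProbeOffsetSketch(unsigned i) { return i; }  // placeholder

// Returns the slot holding 'chars', or -1 when the stub would jump to
// not_found (an undefined entry or four misses).
inline int ProbeTwoCharSketch(const unsigned short* table, unsigned capacity,
                              unsigned hash, unsigned short chars) {
  const unsigned kProbesSketch = 4;  // matches kProbes above
  unsigned mask = capacity - 1;      // capacity is a power of two
  for (unsigned i = 0; i < kProbesSketch; i++) {
    unsigned entry = (hash + ProbeOffsetSketch(i)) & mask;
    if (table[entry] == 0) return -1;  // undefined: no match can exist
    if (table[entry] == chars) return static_cast<int>(entry);
  }
  return -1;  // give up after four probes and allocate a new string
}
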
13878
13879
Steve Block6ded16b2010-05-10 14:33:55 +010013880void StringHelper::GenerateHashInit(MacroAssembler* masm,
13881 Register hash,
13882 Register character,
13883 Register scratch) {
Andrei Popescu402d9372010-02-26 13:31:12 +000013884 // hash = character + (character << 10);
13885 __ mov(hash, character);
13886 __ shl(hash, 10);
13887 __ add(hash, Operand(character));
13888 // hash ^= hash >> 6;
13889 __ mov(scratch, hash);
13890 __ sar(scratch, 6);
13891 __ xor_(hash, Operand(scratch));
13892}
13893
13894
Steve Block6ded16b2010-05-10 14:33:55 +010013895void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
13896 Register hash,
13897 Register character,
13898 Register scratch) {
Andrei Popescu402d9372010-02-26 13:31:12 +000013899 // hash += character;
13900 __ add(hash, Operand(character));
13901 // hash += hash << 10;
13902 __ mov(scratch, hash);
13903 __ shl(scratch, 10);
13904 __ add(hash, Operand(scratch));
13905 // hash ^= hash >> 6;
13906 __ mov(scratch, hash);
13907 __ sar(scratch, 6);
13908 __ xor_(hash, Operand(scratch));
13909}
13910
13911
Steve Block6ded16b2010-05-10 14:33:55 +010013912void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
13913 Register hash,
13914 Register scratch) {
Andrei Popescu402d9372010-02-26 13:31:12 +000013915 // hash += hash << 3;
13916 __ mov(scratch, hash);
13917 __ shl(scratch, 3);
13918 __ add(hash, Operand(scratch));
13919 // hash ^= hash >> 11;
13920 __ mov(scratch, hash);
13921 __ sar(scratch, 11);
13922 __ xor_(hash, Operand(scratch));
13923 // hash += hash << 15;
13924 __ mov(scratch, hash);
13925 __ shl(scratch, 15);
13926 __ add(hash, Operand(scratch));
13927
13928 // if (hash == 0) hash = 27;
13929 Label hash_not_zero;
13930 __ test(hash, Operand(hash));
13931 __ j(not_zero, &hash_not_zero);
13932 __ mov(hash, Immediate(27));
13933 __ bind(&hash_not_zero);
13934}
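
// Taken together, the three helpers above compute the classic
// one-at-a-time string hash. Equivalent C++ for a two-character string (a
// sketch; the generated code shifts with sar, so arithmetic shifts are
// mirrored here via a signed cast):
inline unsigned TwoCharHashSketch(unsigned c1, unsigned c2) {
  unsigned hash = c1 + (c1 << 10);                       // GenerateHashInit
  hash ^= static_cast<unsigned>(static_cast<int>(hash) >> 6);
  hash += c2;                                    // GenerateHashAddCharacter
  hash += hash << 10;
  hash ^= static_cast<unsigned>(static_cast<int>(hash) >> 6);
  hash += hash << 3;                                  // GenerateHashGetHash
  hash ^= static_cast<unsigned>(static_cast<int>(hash) >> 11);
  hash += hash << 15;
  if (hash == 0) hash = 27;                     // never produce a zero hash
  return hash;
}
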
13935
13936
Leon Clarkee46be812010-01-19 14:06:41 +000013937void SubStringStub::Generate(MacroAssembler* masm) {
13938 Label runtime;
13939
13940 // Stack frame on entry.
13941 // esp[0]: return address
13942 // esp[4]: to
13943 // esp[8]: from
13944 // esp[12]: string
13945
13946 // Make sure first argument is a string.
13947 __ mov(eax, Operand(esp, 3 * kPointerSize));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013948 STATIC_ASSERT(kSmiTag == 0);
Leon Clarkee46be812010-01-19 14:06:41 +000013949 __ test(eax, Immediate(kSmiTagMask));
13950 __ j(zero, &runtime);
13951 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
13952 __ j(NegateCondition(is_string), &runtime);
13953
13954 // eax: string
13955 // ebx: instance type
Kristian Monsen50ef84f2010-07-29 15:18:00 +010013956
Leon Clarkee46be812010-01-19 14:06:41 +000013957 // Calculate length of sub string using the smi values.
Andrei Popescu402d9372010-02-26 13:31:12 +000013958 Label result_longer_than_two;
13959 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
Leon Clarkee46be812010-01-19 14:06:41 +000013960 __ test(ecx, Immediate(kSmiTagMask));
13961 __ j(not_zero, &runtime);
Andrei Popescu402d9372010-02-26 13:31:12 +000013962 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
Leon Clarkee46be812010-01-19 14:06:41 +000013963 __ test(edx, Immediate(kSmiTagMask));
13964 __ j(not_zero, &runtime);
13965 __ sub(ecx, Operand(edx));
Steve Block8defd9f2010-07-08 12:39:36 +010013966 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
13967 Label return_eax;
13968 __ j(equal, &return_eax);
Andrei Popescu402d9372010-02-26 13:31:12 +000013969 // Special handling of sub-strings of length 1 and 2. One character strings
13970 // are handled in the runtime system (looked up in the single character
13971 // cache). Two character strings are looked up in the symbol table.
Leon Clarkee46be812010-01-19 14:06:41 +000013972 __ SmiUntag(ecx); // Result length is no longer smi.
13973 __ cmp(ecx, 2);
Andrei Popescu402d9372010-02-26 13:31:12 +000013974 __ j(greater, &result_longer_than_two);
13975 __ j(less, &runtime);
Leon Clarkee46be812010-01-19 14:06:41 +000013976
Andrei Popescu402d9372010-02-26 13:31:12 +000013977 // Sub string of length 2 requested.
13978 // eax: string
13979 // ebx: instance type
13980 // ecx: sub string length (value is 2)
13981 // edx: from index (smi)
13982 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);
13983
13984 // Get the two characters forming the sub string.
13985 __ SmiUntag(edx); // From index is no longer smi.
13986 __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
13987 __ movzx_b(ecx,
13988 FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));
13989
13990 // Try to look up the two character string in the symbol table.
13991 Label make_two_character_string;
Steve Block6ded16b2010-05-10 14:33:55 +010013992 StringHelper::GenerateTwoCharacterSymbolTableProbe(
13993 masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
13994 __ ret(3 * kPointerSize);
Andrei Popescu402d9372010-02-26 13:31:12 +000013995
13996 __ bind(&make_two_character_string);
13997 // Set up registers for allocating the two character string.
13998 __ mov(eax, Operand(esp, 3 * kPointerSize));
13999 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
14000 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
14001 __ Set(ecx, Immediate(2));
14002
14003 __ bind(&result_longer_than_two);
Leon Clarkee46be812010-01-19 14:06:41 +000014004 // eax: string
14005 // ebx: instance type
14006 // ecx: result string length
14007 // Check for flat ascii string
14008 Label non_ascii_flat;
Andrei Popescu402d9372010-02-26 13:31:12 +000014009 __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);
Leon Clarkee46be812010-01-19 14:06:41 +000014010
14011 // Allocate the result.
14012 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
14013
14014 // eax: result string
14015 // ecx: result string length
14016 __ mov(edx, esi); // esi used by following code.
14017 // Locate first character of result.
14018 __ mov(edi, eax);
14019 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
14020 // Load string argument and locate character of sub string start.
14021 __ mov(esi, Operand(esp, 3 * kPointerSize));
14022 __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
14023 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
14024 __ SmiUntag(ebx);
14025 __ add(esi, Operand(ebx));
14026
14027 // eax: result string
14028 // ecx: result length
14029 // edx: original value of esi
14030 // edi: first character of result
14031 // esi: character of sub string start
Steve Block6ded16b2010-05-10 14:33:55 +010014032 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
Leon Clarkee46be812010-01-19 14:06:41 +000014033 __ mov(esi, edx); // Restore esi.
14034 __ IncrementCounter(&Counters::sub_string_native, 1);
14035 __ ret(3 * kPointerSize);
14036
14037 __ bind(&non_ascii_flat);
14038 // eax: string
14039 // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
14040 // ecx: result string length
14041 // Check for flat two byte string
14042 __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
14043 __ j(not_equal, &runtime);
14044
14045 // Allocate the result.
14046 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);
14047
14048 // eax: result string
14049 // ecx: result string length
14050 __ mov(edx, esi); // esi used by following code.
14051 // Locate first character of result.
14052 __ mov(edi, eax);
14053 __ add(Operand(edi),
14054 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
14055 // Load string argument and locate character of sub string start.
14056 __ mov(esi, Operand(esp, 3 * kPointerSize));
Andrei Popescu31002712010-02-23 13:46:05 +000014057 __ add(Operand(esi),
14058 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Leon Clarkee46be812010-01-19 14:06:41 +000014059 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
14060 // As from is a smi it is 2 times the value, which matches the size of a
14061 // two-byte character.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010014062 STATIC_ASSERT(kSmiTag == 0);
14063 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
Leon Clarkee46be812010-01-19 14:06:41 +000014064 __ add(esi, Operand(ebx));
14065
14066 // eax: result string
14067 // ecx: result length
14068 // edx: original value of esi
14069 // edi: first character of result
14070 // esi: character of sub string start
Steve Block6ded16b2010-05-10 14:33:55 +010014071 StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
Leon Clarkee46be812010-01-19 14:06:41 +000014072 __ mov(esi, edx); // Restore esi.
Steve Block8defd9f2010-07-08 12:39:36 +010014073
14074 __ bind(&return_eax);
Leon Clarkee46be812010-01-19 14:06:41 +000014075 __ IncrementCounter(&Counters::sub_string_native, 1);
14076 __ ret(3 * kPointerSize);
14077
14078 // Just jump to runtime to create the sub string.
14079 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010014080 __ TailCallRuntime(Runtime::kSubString, 3, 1);
Leon Clarkee46be812010-01-19 14:06:41 +000014081}
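
// Note the smi trick used twice above: on ia32 a smi is the integer value
// shifted left by one, so a smi-tagged index is already the byte offset
// into a two-byte string, while ascii strings need a SmiUntag first. A
// one-line sketch of that arithmetic:
inline int SmiIndexToByteOffsetSketch(int smi_index, bool two_byte) {
  // smi_index == value << 1, given kSmiTag == 0 and
  // kSmiTagSize + kSmiShiftSize == 1 as asserted above.
  return two_byte ? smi_index        // 2 bytes per character: use as-is
                  : smi_index >> 1;  // 1 byte per character: untag
}
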
14082
14083
14084void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
14085 Register left,
14086 Register right,
14087 Register scratch1,
14088 Register scratch2,
14089 Register scratch3) {
Leon Clarked91b9f72010-01-27 17:25:45 +000014090 Label result_not_equal;
14091 Label result_greater;
14092 Label compare_lengths;
Steve Block6ded16b2010-05-10 14:33:55 +010014093
14094 __ IncrementCounter(&Counters::string_compare_native, 1);
14095
Leon Clarked91b9f72010-01-27 17:25:45 +000014096 // Find minimum length.
14097 Label left_shorter;
Leon Clarkee46be812010-01-19 14:06:41 +000014098 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +000014099 __ mov(scratch3, scratch1);
14100 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
14101
14102 Register length_delta = scratch3;
14103
14104 __ j(less_equal, &left_shorter);
14105 // Right string is shorter. Change scratch1 to be length of right string.
14106 __ sub(scratch1, Operand(length_delta));
14107 __ bind(&left_shorter);
14108
14109 Register min_length = scratch1;
14110
14111 // If either length is zero, just compare lengths.
14112 __ test(min_length, Operand(min_length));
14113 __ j(zero, &compare_lengths);
14114
14115 // Change the index to run from -min_length to -1 by adding min_length
14116 // to the string start. This means the loop ends when the index reaches
14117 // zero, so no additional compare is needed.
Steve Block6ded16b2010-05-10 14:33:55 +010014118 __ SmiUntag(min_length);
Leon Clarked91b9f72010-01-27 17:25:45 +000014119 __ lea(left,
14120 FieldOperand(left,
14121 min_length, times_1,
14122 SeqAsciiString::kHeaderSize));
14123 __ lea(right,
14124 FieldOperand(right,
14125 min_length, times_1,
14126 SeqAsciiString::kHeaderSize));
14127 __ neg(min_length);
14128
14129 Register index = min_length; // index = -min_length;
14130
14131 {
14132 // Compare loop.
14133 Label loop;
14134 __ bind(&loop);
14135 // Compare characters.
14136 __ mov_b(scratch2, Operand(left, index, times_1, 0));
14137 __ cmpb(scratch2, Operand(right, index, times_1, 0));
14138 __ j(not_equal, &result_not_equal);
14139 __ add(Operand(index), Immediate(1));
14140 __ j(not_zero, &loop);
Leon Clarkee46be812010-01-19 14:06:41 +000014141 }
14142
Leon Clarked91b9f72010-01-27 17:25:45 +000014143 // Compare lengths - strings up to min-length are equal.
Leon Clarkee46be812010-01-19 14:06:41 +000014144 __ bind(&compare_lengths);
Leon Clarked91b9f72010-01-27 17:25:45 +000014145 __ test(length_delta, Operand(length_delta));
Leon Clarkee46be812010-01-19 14:06:41 +000014146 __ j(not_zero, &result_not_equal);
14147
14148 // Result is EQUAL.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010014149 STATIC_ASSERT(EQUAL == 0);
14150 STATIC_ASSERT(kSmiTag == 0);
Leon Clarked91b9f72010-01-27 17:25:45 +000014151 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010014152 __ ret(0);
Leon Clarked91b9f72010-01-27 17:25:45 +000014153
Leon Clarkee46be812010-01-19 14:06:41 +000014154 __ bind(&result_not_equal);
14155 __ j(greater, &result_greater);
14156
14157 // Result is LESS.
Leon Clarked91b9f72010-01-27 17:25:45 +000014158 __ Set(eax, Immediate(Smi::FromInt(LESS)));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010014159 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +000014160
14161 // Result is GREATER.
14162 __ bind(&result_greater);
Leon Clarked91b9f72010-01-27 17:25:45 +000014163 __ Set(eax, Immediate(Smi::FromInt(GREATER)));
Kristian Monsen50ef84f2010-07-29 15:18:00 +010014164 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +000014165}
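
// The compare loop above avoids a separate index-vs-limit check by biasing
// both string pointers by min_length and running the index from -min_length
// up to zero, so loop termination falls out of the add instruction's zero
// flag. A C++ sketch of the same structure (returning LESS/EQUAL/GREATER
// as -1/0/1):
inline int CompareFlatAsciiSketch(const unsigned char* left, int left_len,
                                  const unsigned char* right, int right_len) {
  int min_length = left_len < right_len ? left_len : right_len;
  const unsigned char* lend = left + min_length;   // biased base pointers
  const unsigned char* rend = right + min_length;
  for (int i = -min_length; i != 0; i++) {         // i hits 0 exactly at end
    if (lend[i] != rend[i]) return lend[i] < rend[i] ? -1 : 1;
  }
  // Strings are equal up to min_length; compare lengths to break the tie.
  if (left_len == right_len) return 0;
  return left_len < right_len ? -1 : 1;
}
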
14166
14167
14168void StringCompareStub::Generate(MacroAssembler* masm) {
14169 Label runtime;
14170
14171 // Stack frame on entry.
14172 // esp[0]: return address
14173 // esp[4]: right string
14174 // esp[8]: left string
14175
14176 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
14177 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
14178
14179 Label not_same;
14180 __ cmp(edx, Operand(eax));
14181 __ j(not_equal, &not_same);
Kristian Monsen50ef84f2010-07-29 15:18:00 +010014182 STATIC_ASSERT(EQUAL == 0);
14183 STATIC_ASSERT(kSmiTag == 0);
Leon Clarked91b9f72010-01-27 17:25:45 +000014184 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
Leon Clarkee46be812010-01-19 14:06:41 +000014185 __ IncrementCounter(&Counters::string_compare_native, 1);
14186 __ ret(2 * kPointerSize);
14187
14188 __ bind(&not_same);
14189
Leon Clarked91b9f72010-01-27 17:25:45 +000014190 // Check that both objects are sequential ascii strings.
14191 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
Leon Clarkee46be812010-01-19 14:06:41 +000014192
14193 // Compare flat ascii strings.
Kristian Monsen50ef84f2010-07-29 15:18:00 +010014194 // Drop arguments from the stack.
14195 __ pop(ecx);
14196 __ add(Operand(esp), Immediate(2 * kPointerSize));
14197 __ push(ecx);
Leon Clarkee46be812010-01-19 14:06:41 +000014198 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
14199
Leon Clarkee46be812010-01-19 14:06:41 +000014200 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
14201 // tagged as a small integer.
14202 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010014203 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
Leon Clarkee46be812010-01-19 14:06:41 +000014204}
14205
Steve Blocka7e24c12009-10-30 11:49:00 +000014206#undef __
14207
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010014208#define __ masm.
14209
14210MemCopyFunction CreateMemCopyFunction() {
14211 size_t actual_size;
14212 byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
14213 &actual_size,
14214 true));
14215 CHECK(buffer);
14216 HandleScope handles;
14217 MacroAssembler masm(buffer, static_cast<int>(actual_size));
14218
14219 // Generated code is put into a fixed, unmovable buffer, and not into
14220 // the V8 heap. We can't, and don't, refer to any relocatable addresses
14221 // (e.g. the JavaScript nan-object).
14222
14223 // 32-bit C declaration function calls pass arguments on stack.
14224
14225 // Stack layout:
14226 // esp[12]: Third argument, size.
14227 // esp[8]: Second argument, source pointer.
14228 // esp[4]: First argument, destination pointer.
14229 // esp[0]: return address
14230
14231 const int kDestinationOffset = 1 * kPointerSize;
14232 const int kSourceOffset = 2 * kPointerSize;
14233 const int kSizeOffset = 3 * kPointerSize;
14234
14235 int stack_offset = 0; // Update if we change the stack height.
14236
14237 if (FLAG_debug_code) {
14238 __ cmp(Operand(esp, kSizeOffset + stack_offset),
14239 Immediate(kMinComplexMemCopy));
14240 Label ok;
14241 __ j(greater_equal, &ok);
14242 __ int3();
14243 __ bind(&ok);
14244 }
14245 if (CpuFeatures::IsSupported(SSE2)) {
14246 CpuFeatures::Scope enable(SSE2);
14247 __ push(edi);
14248 __ push(esi);
14249 stack_offset += 2 * kPointerSize;
14250 Register dst = edi;
14251 Register src = esi;
14252 Register count = ecx;
14253 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
14254 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
14255 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
14256
14257
14258 __ movdqu(xmm0, Operand(src, 0));
14259 __ movdqu(Operand(dst, 0), xmm0);
14260 __ mov(edx, dst);
14261 __ and_(edx, 0xF);
14262 __ neg(edx);
14263 __ add(Operand(edx), Immediate(16));
14264 __ add(dst, Operand(edx));
14265 __ add(src, Operand(edx));
14266 __ sub(Operand(count), edx);
14267
14268 // edi is now aligned. Check if esi is also aligned.
14269 Label unaligned_source;
14270 __ test(Operand(src), Immediate(0x0F));
14271 __ j(not_zero, &unaligned_source);
14272 {
14273 __ IncrementCounter(&Counters::memcopy_aligned, 1);
14274 // Copy loop for aligned source and destination.
14275 __ mov(edx, count);
14276 Register loop_count = ecx;
14277 Register count = edx;
14278 __ shr(loop_count, 5);
14279 {
14280 // Main copy loop.
14281 Label loop;
14282 __ bind(&loop);
14283 __ prefetch(Operand(src, 0x20), 1);
14284 __ movdqa(xmm0, Operand(src, 0x00));
14285 __ movdqa(xmm1, Operand(src, 0x10));
14286 __ add(Operand(src), Immediate(0x20));
14287
14288 __ movdqa(Operand(dst, 0x00), xmm0);
14289 __ movdqa(Operand(dst, 0x10), xmm1);
14290 __ add(Operand(dst), Immediate(0x20));
14291
14292 __ dec(loop_count);
14293 __ j(not_zero, &loop);
14294 }
14295
14296 // At most 31 bytes to copy.
14297 Label move_less_16;
14298 __ test(Operand(count), Immediate(0x10));
14299 __ j(zero, &move_less_16);
14300 __ movdqa(xmm0, Operand(src, 0));
14301 __ add(Operand(src), Immediate(0x10));
14302 __ movdqa(Operand(dst, 0), xmm0);
14303 __ add(Operand(dst), Immediate(0x10));
14304 __ bind(&move_less_16);
14305
14306 // At most 15 bytes to copy. Copy 16 bytes at the end of the block.
14307 __ and_(count, 0xF);
14308 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
14309 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
14310
14311 __ pop(esi);
14312 __ pop(edi);
14313 __ ret(0);
14314 }
14315 __ Align(16);
14316 {
14317 // Copy loop for unaligned source and aligned destination.
14318 // If source is not aligned, we can't read it as efficiently.
14319 __ bind(&unaligned_source);
14320 __ IncrementCounter(&Counters::memcopy_unaligned, 1);
14321 __ mov(edx, ecx);
14322 Register loop_count = ecx;
14323 Register count = edx;
14324 __ shr(loop_count, 5);
14325 {
14326 // Main copy loop
14327 Label loop;
14328 __ bind(&loop);
14329 __ prefetch(Operand(src, 0x20), 1);
14330 __ movdqu(xmm0, Operand(src, 0x00));
14331 __ movdqu(xmm1, Operand(src, 0x10));
14332 __ add(Operand(src), Immediate(0x20));
14333
14334 __ movdqa(Operand(dst, 0x00), xmm0);
14335 __ movdqa(Operand(dst, 0x10), xmm1);
14336 __ add(Operand(dst), Immediate(0x20));
14337
14338 __ dec(loop_count);
14339 __ j(not_zero, &loop);
14340 }
14341
14342 // At most 31 bytes to copy.
14343 Label move_less_16;
14344 __ test(Operand(count), Immediate(0x10));
14345 __ j(zero, &move_less_16);
14346 __ movdqu(xmm0, Operand(src, 0));
14347 __ add(Operand(src), Immediate(0x10));
14348 __ movdqa(Operand(dst, 0), xmm0);
14349 __ add(Operand(dst), Immediate(0x10));
14350 __ bind(&move_less_16);
14351
14352 // At most 15 bytes to copy. Copy 16 bytes at the end of the block.
14353 __ and_(count, 0x0F);
14354 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
14355 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
14356
14357 __ pop(esi);
14358 __ pop(edi);
14359 __ ret(0);
14360 }
14361
14362 } else {
14363 __ IncrementCounter(&Counters::memcopy_noxmm, 1);
14364 // SSE2 not supported. Unlikely to happen in practice.
14365 __ push(edi);
14366 __ push(esi);
14367 stack_offset += 2 * kPointerSize;
14368 __ cld();
14369 Register dst = edi;
14370 Register src = esi;
14371 Register count = ecx;
14372 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
14373 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
14374 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
14375
14376 // Copy the first word.
14377 __ mov(eax, Operand(src, 0));
14378 __ mov(Operand(dst, 0), eax);
14379
14380 // Increment src and dst so that dst is aligned.
14381 __ mov(edx, dst);
14382 __ and_(edx, 0x03);
14383 __ neg(edx);
14384 __ add(Operand(edx), Immediate(4)); // edx = 4 - (dst & 3)
14385 __ add(dst, Operand(edx));
14386 __ add(src, Operand(edx));
14387 __ sub(Operand(count), edx);
14388 // edi is now aligned; ecx holds the number of remaining bytes to copy.
14389
14390 __ mov(edx, count);
14391 count = edx;
14392 __ shr(ecx, 2); // Make word count instead of byte count.
14393 __ rep_movs();
14394
14395 // At most 3 bytes left to copy. Copy 4 bytes at the end of the block.
14396 __ and_(count, 3);
14397 __ mov(eax, Operand(src, count, times_1, -4));
14398 __ mov(Operand(dst, count, times_1, -4), eax);
14399
14400 __ pop(esi);
14401 __ pop(edi);
14402 __ ret(0);
14403 }
14404
14405 CodeDesc desc;
14406 masm.GetCode(&desc);
14407 // Call the function from C++.
14408 return FUNCTION_CAST<MemCopyFunction>(buffer);
14409}
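
// The SSE2 path above uses a standard alignment trick worth restating: do
// one unaligned 16-byte copy at the start, round dst up to a 16-byte
// boundary, run the aligned main loop, then finish with one unaligned
// 16-byte copy ending exactly at dst + size. The head and tail copies may
// overlap the middle chunks, which is harmless given memcpy's
// non-overlapping buffers. A plain C++ sketch of the offset arithmetic
// (byte loops stand in for the movdqu/movdqa instructions):
inline void MemCopySketch(unsigned char* dst, const unsigned char* src,
                          unsigned long size) {
  // Assumes size >= 16, mirroring the kMinComplexMemCopy guard above, and
  // non-overlapping buffers.
  unsigned char* const dst_end = dst + size;
  const unsigned char* const src_end = src + size;
  for (int i = 0; i < 16; i++) dst[i] = src[i];  // unaligned head
  unsigned long adjust =
      16 - (reinterpret_cast<unsigned long>(dst) & 0xF);  // 1..16 bytes
  dst += adjust;  // dst is now 16-byte aligned
  src += adjust;
  while (dst + 16 <= dst_end) {  // aligned middle, 16 bytes at a time
    for (int j = 0; j < 16; j++) dst[j] = src[j];
    dst += 16;
    src += 16;
  }
  for (int j = 16; j >= 1; j--) {  // unaligned tail: last 16 bytes,
    dst_end[-j] = src_end[-j];     // ending exactly at dst + size
  }
}
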
14410
14411#undef __
14412
Steve Blocka7e24c12009-10-30 11:49:00 +000014413} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +010014414
14415#endif // V8_TARGET_ARCH_IA32