// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

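// Note on the encoding used by registers_ below: each entry holds kIgnore,
// kPush, or the ebp-relative byte offset of the register's frame slot,
// possibly with the kSyncedFlag bit set.  A kPush entry is saved with push
// and restored with pop; an offset entry is stored to its frame slot unless
// kSyncedFlag indicates the slot already holds the current value.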
void DeferredCode::SaveRegisters() {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void DeferredCode::RestoreRegisters() {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->loop_nesting();

  JumpTarget::set_compiling_deferred_code(false);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    frame_->SpillAll();
    __ int3();
  }
#endif

  // New scope to get automatic timing calculation.
  { HistogramTimerScope codegen_timer(&Counters::code_generation);
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

    if (info->mode() == CompilationInfo::PRIMARY) {
      frame_->Enter();

      // Allocate space for locals and initialize them.
      frame_->AllocateStackSlots();

      // Allocate the local context if needed.
      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
      if (heap_slots > 0) {
        Comment cmnt(masm_, "[ allocate local context");
        // Allocate local context.
        // Get outer context and create a new context based on it.
        frame_->PushFunction();
        Result context;
        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
          FastNewContextStub stub(heap_slots);
          context = frame_->CallStub(&stub, 1);
        } else {
          context = frame_->CallRuntime(Runtime::kNewContext, 1);
        }

        // Update context local.
        frame_->SaveContextRegister();

        // Verify that the runtime call result and esi agree.
        if (FLAG_debug_code) {
          __ cmp(context.reg(), Operand(esi));
          __ Assert(equal, "Runtime::NewContext should end up in esi");
        }
      }

      // TODO(1241774): Improve this code:
      // 1) only needed if we have a context
      // 2) no need to recompute context ptr every single time
      // 3) don't copy parameter operand code from SlotOperand!
      {
        Comment cmnt2(masm_, "[ copy context parameters into .context");
        // Note that iteration order is relevant here! If we have the same
        // parameter twice (e.g., function (x, y, x)), and that parameter
        // needs to be copied into the context, it must be the last argument
        // passed to the parameter that needs to be copied. This is a rare
        // case so we don't check for it, instead we rely on the copying
        // order: such a parameter is copied repeatedly into the same
        // context location and thus the last value is what is seen inside
        // the function.
        for (int i = 0; i < scope()->num_parameters(); i++) {
          Variable* par = scope()->parameter(i);
          Slot* slot = par->slot();
          if (slot != NULL && slot->type() == Slot::CONTEXT) {
            // The use of SlotOperand below is safe in unspilled code
            // because the slot is guaranteed to be a context slot.
            //
            // There are no parameters in the global scope.
            ASSERT(!scope()->is_global_scope());
            frame_->PushParameterAt(i);
            Result value = frame_->Pop();
            value.ToRegister();

            // SlotOperand loads context.reg() with the context object
            // stored to, used below in RecordWrite.
            Result context = allocator_->Allocate();
            ASSERT(context.is_valid());
            __ mov(SlotOperand(slot, context.reg()), value.reg());
            int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
            Result scratch = allocator_->Allocate();
            ASSERT(scratch.is_valid());
            frame_->Spill(context.reg());
            frame_->Spill(value.reg());
            __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
          }
        }
      }

      // Store the arguments object. This must happen after context
      // initialization because the arguments object may be stored in
      // the context.
      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
        StoreArgumentsObject(true);
      }

      // Initialize ThisFunction reference if present.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        frame_->Push(Factory::the_hole_value());
        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
      }
    } else {
      // When used as the secondary compiler for splitting, ebp, esi,
      // and edi have been pushed on the stack. Adjust the virtual
      // frame to match this state.
      frame_->Adjust(3);
      allocator_->Unuse(edi);

      // Bind all the bailout labels to the beginning of the function.
      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
      for (int i = 0; i < bailouts->length(); i++) {
        __ bind(bailouts->at(i)->label());
      }
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(info->loop_nesting(), loop_nesting_);
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // do not overwrite context register
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference
      // because the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}


Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame. If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}

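// On ia32 a smi is a 32-bit word whose low bit is the tag (kSmiTag == 0)
// and whose upper 31 bits are the payload, so tagging an untagged 31-bit
// integer is a one-bit left shift.  The add instruction below adds the
// register to itself, which is exactly that shift: e.g. the untagged value
// 5 (0b101) becomes 0b1010, the smi encoding of 5.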
void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    __ add(value->reg(), Operand(value->reg()));
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}

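// This is reached when the result is not guaranteed to fit in a smi, so the
// smi-tagging add below may overflow.  Adding val to itself overflows
// exactly when bits 30 and 31 of val differ, i.e. when the value needs more
// than 31 bits.  In that case 'sar' by one followed by xoring the sign bit
// recovers the original: e.g. 0x60000000 doubles to 0xC0000000 (overflow),
// sar gives 0xE0000000, and the xor restores 0x60000000.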
void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number disagree.
    __ xor_(val, 0x80000000u);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU.
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int computation,
  // compile that SafeInt32 path, and a bailout path.
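  // (For instance, an expression like (a & 0xFF) | (b << 8) | (c << 16) is
  // side-effect free and contains more than two bit ops, so it would take
  // the SafeInt32 path and be evaluated with untagged 32-bit intermediates.)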
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      CpuFeatures::IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;

    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(Factory::false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(Factory::true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(Factory::true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(Factory::false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (eg, the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(Factory::true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(Factory::false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObject());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObject());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObject());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
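    // (For example, 'typeof x' where x is undeclared must evaluate to
    // "undefined" instead of throwing the ReferenceError that an ordinary
    // contextual load would produce.)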
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    Result result =
        LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
    frame()->Push(&result);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has already
    // been written to. This can happen if a function has a local
    // variable named 'arguments'.
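    // (For example, in 'function f() { var arguments = 0; ... }' the local
    // assignment may already have replaced the hole sentinel by the time
    // the arguments object is lazily allocated.)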
    Result probe = LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->slot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
  return frame_->Pop();
}


// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code. Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it. Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it. Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_integer32()) {  // Also takes Smi case.
    Comment cmnt(masm_, "ONLY_INTEGER_32");
    if (FLAG_debug_code) {
      Label ok;
      __ AbortIfNotNumber(value.reg());
      __ test(value.reg(), Immediate(kSmiTagMask));
      __ j(zero, &ok);
      __ fldz();
      __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
      __ FCmp();
      __ j(not_zero, &ok);
      __ Abort("Smi was wrapped in HeapNumber in output from bitop");
      __ bind(&ok);
    }
    // In the integer32 case there are no Smis hidden in heap numbers, so we
    // need only test for Smi zero.
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    value.Unuse();
    dest->Split(not_zero);
  } else if (value.is_number()) {
    Comment cmnt(masm_, "ONLY_NUMBER");
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);
    __ fldz();
    __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
    __ FCmp();
    value.Unuse();
    dest->Split(not_zero);
  } else {
    // Fast case checks.
    // 'false' => false.
    __ cmp(value.reg(), Factory::false_value());
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ cmp(value.reg(), Factory::true_value());
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ cmp(value.reg(), Factory::undefined_value());
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ test(temp.reg(), Operand(temp.reg()));
    temp.Unuse();
    dest->Split(not_equal);
  }
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             TypeInfo type_info,
                             bool use_sse3,
                             Label* operand_conversion_failure);
  static void LoadNumbersAsIntegers(MacroAssembler* masm,
                                    TypeInfo type_info,
                                    bool use_sse3,
                                    Label* operand_conversion_failure);
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Test if operands are smis or heap numbers and load them
  // into xmm0 and xmm1 if they are. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);
};


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


// Call the specialized stub for a binary operation.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
};


void DeferredInlineBinaryOperation::Generate() {
  Label done;
  if (CpuFeatures::IsSupported(SSE2) && ((op_ == Token::ADD) ||
                                         (op_ == Token::SUB) ||
                                         (op_ == Token::MUL) ||
                                         (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    __ jmp(&done);

    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ bind(&done);
}


static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
                                  Token::Value op,
                                  const Result& right,
                                  const Result& left) {
  // Set TypeInfo of result according to the operation performed.
  // Rely on the fact that smis have a 31 bit payload on ia32.
  ASSERT(kSmiValueSize == 31);
  switch (op) {
    case Token::COMMA:
      return right.type_info();
    case Token::OR:
    case Token::AND:
      // Result type can be either of the two input types.
      return operands_type;
    case Token::BIT_AND: {
      // Anding with positive Smis will give you a Smi.
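      // (A non-negative smi constant is at most 2^30 - 1; since x & mask
      // has no bits set outside mask, the result lies in [0, mask] and
      // therefore always fits in a smi.)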
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() >= 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
                 Smi::cast(*left.handle())->value() >= 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_OR: {
      // Oring with negative Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() < 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
                 Smi::cast(*left.handle())->value() < 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_XOR:
      // Result is always a 32 bit integer. Smi property of inputs is preserved.
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SAR:
      if (left.is_smi()) return TypeInfo::Smi();
      // Result is a smi if we shift by a constant >= 1, otherwise an integer32.
      // Shift amount is masked with 0x1F (ECMA standard 11.7.2).
      return (right.is_constant() && right.handle()->IsSmi()
              && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SHR:
      // Result is a smi if we shift by a constant >= 2, an integer32 if
      // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
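      // (A logical right shift by >= 2 leaves at most 30 bits, which always
      // fits in a smi; a shift by 1 leaves at most 31 bits, which fits in a
      // signed 32-bit integer but not necessarily in a smi; a shift by 0 can
      // leave a full unsigned 32-bit value, which only a double can hold.)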
      if (right.is_constant() && right.handle()->IsSmi()) {
        int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
        if (shift_amount > 1) {
          return TypeInfo::Smi();
        } else if (shift_amount > 0) {
          return TypeInfo::Integer32();
        }
      }
      return TypeInfo::Number();
    case Token::ADD:
      if (operands_type.IsSmi()) {
        // The Integer32 range is big enough to take the sum of any two Smis.
        return TypeInfo::Integer32();
      } else if (operands_type.IsNumber()) {
        return TypeInfo::Number();
      } else if (left.type_info().IsString() || right.type_info().IsString()) {
        return TypeInfo::String();
      } else {
        return TypeInfo::Unknown();
      }
    case Token::SHL:
      return TypeInfo::Integer32();
    case Token::SUB:
      // The Integer32 range is big enough to take the difference of any two
      // Smis.
      return (operands_type.IsSmi()) ?
             TypeInfo::Integer32() :
             TypeInfo::Number();
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      // Result is always a number.
      return TypeInfo::Number();
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return TypeInfo::Unknown();
}


void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Token::Value op = expr->op();
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    const bool left_is_string = left.type_info().IsString();
    const bool right_is_string = right.type_info().IsString();
    // Make sure constant strings have string type info.
    ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
           left_is_string);
    ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
           right_is_string);
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          StringAddStub stub(NO_STRING_CHECK_IN_STUB);
          answer = frame_->CallStub(&stub, 2);
        } else {
          answer =
            frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
        }
      } else if (right_is_string) {
        answer =
          frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
      }
      answer.set_type_info(TypeInfo::String());
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi_constant =
      right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi_constant && right_is_smi_constant) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  // Get number type of left and right sub-expressions.
  TypeInfo operands_type =
      TypeInfo::Combine(left.type_info(), right.type_info());

  TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);

  Result answer;
  if (left_is_non_smi_constant || right_is_non_smi_constant) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op,
                             overwrite_mode,
                             NO_SMI_CODE_IN_STUB,
                             operands_type);
    answer = stub.GenerateCall(masm_, frame_, &left, &right);
  } else if (right_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
                                        false, overwrite_mode);
  } else if (left_is_smi_constant) {
    answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
                                        true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 &&
        (Token::IsBitOp(op) ||
         operands_type.IsInteger32() ||
         expr->type()->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op,
                               overwrite_mode,
                               NO_GENERIC_BINARY_FLAGS,
                               operands_type);
      answer = stub.GenerateCall(masm_, frame_, &left, &right);
    }
  }

  answer.set_type_info(result_type);
  frame_->Push(&answer);
}


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
      double answer = static_cast<double>(left) * right;
      if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
        // If the product is zero and the non-zero factor is negative,
        // the spec requires us to return floating point negative zero.
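        // (For example, -5 * 0 must evaluate to -0, which a smi cannot
        // represent; answer_object is then left as undefined and the fold
        // fails, so the generic code handles the case at runtime.)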
1346 if (answer != 0 || (left >= 0 && right >= 0)) {
1347 answer_object = Smi::FromInt(static_cast<int>(answer));
1348 }
1349 }
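        // e.g. -3 * 0 must yield -0.0, which has no smi representation; the
        // fold is skipped and the generic code produces the heap number.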
1350 }
1351 break;
1352 case Token::DIV:
1353 case Token::MOD:
1354 break;
1355 case Token::BIT_OR:
1356 answer_object = Smi::FromInt(left | right);
1357 break;
1358 case Token::BIT_AND:
1359 answer_object = Smi::FromInt(left & right);
1360 break;
1361 case Token::BIT_XOR:
1362 answer_object = Smi::FromInt(left ^ right);
1363 break;
1364
1365 case Token::SHL: {
1366 int shift_amount = right & 0x1F;
1367 if (Smi::IsValid(left << shift_amount)) {
1368 answer_object = Smi::FromInt(left << shift_amount);
1369 }
1370 break;
1371 }
1372 case Token::SHR: {
1373 int shift_amount = right & 0x1F;
1374 unsigned int unsigned_left = left;
1375 unsigned_left >>= shift_amount;
1376 if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
1377 answer_object = Smi::FromInt(unsigned_left);
1378 }
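      // e.g. left == -4 (0xfffffffc) with shift_amount == 1 gives 0x7ffffffe,
      // which exceeds Smi::kMaxValue (0x3fffffff), so the fold is skipped.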
1379 break;
1380 }
1381 case Token::SAR: {
1382 int shift_amount = right & 0x1F;
1383 unsigned int unsigned_left = left;
1384 if (left < 0) {
1385 // Perform arithmetic shift of a negative number by
1386 // complementing number, logical shifting, complementing again.
1387 unsigned_left = ~unsigned_left;
1388 unsigned_left >>= shift_amount;
1389 unsigned_left = ~unsigned_left;
1390 } else {
1391 unsigned_left >>= shift_amount;
1392 }
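      // e.g. left == -5 (0xfffffffb): complement gives 4, >> 1 gives 2,
      // complement gives -3, matching the arithmetic shift -5 >> 1 == -3.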
1393 ASSERT(Smi::IsValid(unsigned_left)); // Converted to signed.
1394 answer_object = Smi::FromInt(unsigned_left); // Converted to signed.
1395 break;
1396 }
1397 default:
1398 UNREACHABLE();
1399 break;
1400 }
1401 if (answer_object == Heap::undefined_value()) {
1402 return false;
1403 }
1404 frame_->Push(Handle<Object>(answer_object));
1405 return true;
1406}
1407
1408
Steve Block6ded16b2010-05-10 14:33:55 +01001409static void CheckTwoForSminess(MacroAssembler* masm,
1410 Register left, Register right, Register scratch,
1411 TypeInfo left_info, TypeInfo right_info,
1412 DeferredInlineBinaryOperation* deferred);
1413
1414
Steve Blocka7e24c12009-10-30 11:49:00 +00001415// Implements a binary operation using a deferred code object and some
1416// inline code to operate on smis quickly.
Steve Block6ded16b2010-05-10 14:33:55 +01001417Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
Leon Clarked91b9f72010-01-27 17:25:45 +00001418 Result* left,
1419 Result* right,
1420 OverwriteMode overwrite_mode) {
Kristian Monsen25f61362010-05-21 11:50:48 +01001421 // Copy the type info because left and right may be overwritten.
1422 TypeInfo left_type_info = left->type_info();
1423 TypeInfo right_type_info = right->type_info();
Steve Block6ded16b2010-05-10 14:33:55 +01001424 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00001425 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001426 // Special handling of div and mod because they use fixed registers.
1427 if (op == Token::DIV || op == Token::MOD) {
1428 // We need eax as the quotient register, edx as the remainder
1429 // register, neither left nor right in eax or edx, and left copied
1430 // to eax.
1431 Result quotient;
1432 Result remainder;
1433 bool left_is_in_eax = false;
1434 // Step 1: get eax for quotient.
1435 if ((left->is_register() && left->reg().is(eax)) ||
1436 (right->is_register() && right->reg().is(eax))) {
1437 // One or both operands are in eax. Move them to a fresh non-edx
1438 // register.
1439 Result fresh = allocator_->Allocate();
1440 ASSERT(fresh.is_valid());
1441 if (fresh.reg().is(edx)) {
1442 remainder = fresh;
1443 fresh = allocator_->Allocate();
1444 ASSERT(fresh.is_valid());
1445 }
1446 if (left->is_register() && left->reg().is(eax)) {
1447 quotient = *left;
1448 *left = fresh;
1449 left_is_in_eax = true;
1450 }
1451 if (right->is_register() && right->reg().is(eax)) {
1452 quotient = *right;
1453 *right = fresh;
1454 }
1455 __ mov(fresh.reg(), eax);
1456 } else {
1457 // Neither left nor right is in eax.
1458 quotient = allocator_->Allocate(eax);
1459 }
1460 ASSERT(quotient.is_register() && quotient.reg().is(eax));
1461 ASSERT(!(left->is_register() && left->reg().is(eax)));
1462 ASSERT(!(right->is_register() && right->reg().is(eax)));
1463
1464 // Step 2: get edx for remainder if necessary.
1465 if (!remainder.is_valid()) {
1466 if ((left->is_register() && left->reg().is(edx)) ||
1467 (right->is_register() && right->reg().is(edx))) {
1468 Result fresh = allocator_->Allocate();
1469 ASSERT(fresh.is_valid());
1470 if (left->is_register() && left->reg().is(edx)) {
1471 remainder = *left;
1472 *left = fresh;
1473 }
1474 if (right->is_register() && right->reg().is(edx)) {
1475 remainder = *right;
1476 *right = fresh;
1477 }
1478 __ mov(fresh.reg(), edx);
1479 } else {
1480 // Neither left nor right is in edx.
1481 remainder = allocator_->Allocate(edx);
1482 }
1483 }
1484 ASSERT(remainder.is_register() && remainder.reg().is(edx));
1485 ASSERT(!(left->is_register() && left->reg().is(edx)));
1486 ASSERT(!(right->is_register() && right->reg().is(edx)));
1487
1488 left->ToRegister();
1489 right->ToRegister();
1490 frame_->Spill(eax);
1491 frame_->Spill(edx);
1492
1493 // Check that left and right are smi tagged.
1494 DeferredInlineBinaryOperation* deferred =
1495 new DeferredInlineBinaryOperation(op,
1496 (op == Token::DIV) ? eax : edx,
1497 left->reg(),
1498 right->reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001499 left_type_info,
1500 right_type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001501 overwrite_mode);
1502 if (left->reg().is(right->reg())) {
1503 __ test(left->reg(), Immediate(kSmiTagMask));
1504 } else {
1505 // Use the quotient register as a scratch for the tag check.
1506 if (!left_is_in_eax) __ mov(eax, left->reg());
1507 left_is_in_eax = false; // About to destroy the value in eax.
1508 __ or_(eax, Operand(right->reg()));
1509 ASSERT(kSmiTag == 0); // Adjust test if not the case.
1510 __ test(eax, Immediate(kSmiTagMask));
1511 }
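    // With kSmiTag == 0, the or-ed tag bits are nonzero iff at least one
    // operand is a heap object.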
1512 deferred->Branch(not_zero);
1513
1514 if (!left_is_in_eax) __ mov(eax, left->reg());
1515 // Sign extend eax into edx:eax.
1516 __ cdq();
1517 // Check for 0 divisor.
1518 __ test(right->reg(), Operand(right->reg()));
1519 deferred->Branch(zero);
1520 // Divide edx:eax by the right operand.
1521 __ idiv(right->reg());
1522
1523 // Complete the operation.
1524 if (op == Token::DIV) {
1525 // Check for negative zero result. If result is zero, and divisor
1526 // is negative, return a floating point negative zero. The
1527 // virtual frame is unchanged in this block, so local control flow
Steve Block6ded16b2010-05-10 14:33:55 +01001528 // can use a Label rather than a JumpTarget. If the context of this
1529 // expression will treat -0 like 0, do not do this test.
1530 if (!expr->no_negative_zero()) {
1531 Label non_zero_result;
1532 __ test(left->reg(), Operand(left->reg()));
1533 __ j(not_zero, &non_zero_result);
1534 __ test(right->reg(), Operand(right->reg()));
1535 deferred->Branch(negative);
1536 __ bind(&non_zero_result);
1537 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001538 // Check for the corner case of dividing the most negative smi by
1539 // -1. We cannot use the overflow flag, since it is not set by the
1540 // idiv instruction.
1541 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
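      // The quotient of two equally tagged smis is untagged, so eax holds an
      // untagged value here; 0x40000000 == 2^30 == -Smi::kMinValue is the
      // only quotient that falls outside the smi range.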
1542 __ cmp(eax, 0x40000000);
1543 deferred->Branch(equal);
1544 // Check that the remainder is zero.
1545 __ test(edx, Operand(edx));
1546 deferred->Branch(not_zero);
1547 // Tag the result and store it in the quotient register.
Leon Clarkee46be812010-01-19 14:06:41 +00001548 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00001549 deferred->BindExit();
1550 left->Unuse();
1551 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001552 answer = quotient;
Steve Blocka7e24c12009-10-30 11:49:00 +00001553 } else {
1554 ASSERT(op == Token::MOD);
1555 // Check for a negative zero result. If the result is zero, and
1556 // the dividend is negative, return a floating point negative
1557 // zero. The frame is unchanged in this block, so local control
1558 // flow can use a Label rather than a JumpTarget.
Steve Block6ded16b2010-05-10 14:33:55 +01001559 if (!expr->no_negative_zero()) {
1560 Label non_zero_result;
1561 __ test(edx, Operand(edx));
1562 __ j(not_zero, &non_zero_result, taken);
1563 __ test(left->reg(), Operand(left->reg()));
1564 deferred->Branch(negative);
1565 __ bind(&non_zero_result);
1566 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001567 deferred->BindExit();
1568 left->Unuse();
1569 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001570 answer = remainder;
Steve Blocka7e24c12009-10-30 11:49:00 +00001571 }
Leon Clarked91b9f72010-01-27 17:25:45 +00001572 ASSERT(answer.is_valid());
1573 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001574 }
1575
1576 // Special handling of shift operations because they use fixed
1577 // registers.
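  // (On ia32, variable-count shift instructions require the shift count in
  // cl, the low byte of ecx.)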
1578 if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
1579 // Move left out of ecx if necessary.
1580 if (left->is_register() && left->reg().is(ecx)) {
1581 *left = allocator_->Allocate();
1582 ASSERT(left->is_valid());
1583 __ mov(left->reg(), ecx);
1584 }
1585 right->ToRegister(ecx);
1586 left->ToRegister();
1587 ASSERT(left->is_register() && !left->reg().is(ecx));
1588 ASSERT(right->is_register() && right->reg().is(ecx));
1589
1590 // We will modify right, it must be spilled.
1591 frame_->Spill(ecx);
1592
1593 // Use a fresh answer register to avoid spilling the left operand.
Leon Clarked91b9f72010-01-27 17:25:45 +00001594 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00001595 ASSERT(answer.is_valid());
1596 // Check that both operands are smis using the answer register as a
1597 // temporary.
1598 DeferredInlineBinaryOperation* deferred =
1599 new DeferredInlineBinaryOperation(op,
1600 answer.reg(),
1601 left->reg(),
1602 ecx,
Kristian Monsen25f61362010-05-21 11:50:48 +01001603 left_type_info,
1604 right_type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001605 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001606
Steve Block6ded16b2010-05-10 14:33:55 +01001607 Label do_op, left_nonsmi;
1608 // If the right operand is known to be a smi, generate a fast case when
1609 // the left operand is either a smi or a heap number.
Kristian Monsen25f61362010-05-21 11:50:48 +01001610 if (CpuFeatures::IsSupported(SSE2) && right_type_info.IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01001611 CpuFeatures::Scope use_sse2(SSE2);
1612 __ mov(answer.reg(), left->reg());
1613 // Fast case - both are actually smis.
Kristian Monsen25f61362010-05-21 11:50:48 +01001614 if (!left_type_info.IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01001615 __ test(answer.reg(), Immediate(kSmiTagMask));
1616 __ j(not_zero, &left_nonsmi);
1617 } else {
1618 if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
1619 }
1620 if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
1621 __ SmiUntag(answer.reg());
1622 __ jmp(&do_op);
1623
1624 __ bind(&left_nonsmi);
1625 // Branch if not a heap number.
1626 __ cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset),
1627 Factory::heap_number_map());
1628 deferred->Branch(not_equal);
1629
1630 // Load integer value into answer register using truncation.
1631 __ cvttsd2si(answer.reg(),
1632 FieldOperand(answer.reg(), HeapNumber::kValueOffset));
1633 // Branch if we do not fit in a smi.
1634 __ cmp(answer.reg(), 0xc0000000);
1635 deferred->Branch(negative);
1636 } else {
1637 CheckTwoForSminess(masm_, left->reg(), right->reg(), answer.reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001638 left_type_info, right_type_info, deferred);
Steve Block6ded16b2010-05-10 14:33:55 +01001639
1640 // Untag both operands.
1641 __ mov(answer.reg(), left->reg());
1642 __ SmiUntag(answer.reg());
1643 }
1644
1645 __ bind(&do_op);
Leon Clarkee46be812010-01-19 14:06:41 +00001646 __ SmiUntag(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001647 // Perform the operation.
1648 switch (op) {
1649 case Token::SAR:
Steve Blockd0582a62009-12-15 09:54:21 +00001650 __ sar_cl(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001651 // No check of the result is necessary.
1652 break;
1653 case Token::SHR: {
1654 Label result_ok;
Steve Blockd0582a62009-12-15 09:54:21 +00001655 __ shr_cl(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001656 // Check that the *unsigned* result fits in a smi. Neither of
1657 // the two high-order bits can be set:
1658 // * 0x80000000: high bit would be lost when smi tagging.
1659 // * 0x40000000: this number would convert to negative when smi
1660 // tagging.
1661 // These two cases can only happen with shifts by 0 or 1 when
1662 // handed a valid smi. If the answer cannot be represented by a
1663 // smi, restore the left and right arguments, and jump to slow
1664 // case. The low bit of the left argument may be lost, but only
1665 // in a case where it is dropped anyway.
1666 __ test(answer.reg(), Immediate(0xc0000000));
1667 __ j(zero, &result_ok);
Leon Clarkee46be812010-01-19 14:06:41 +00001668 __ SmiTag(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001669 deferred->Jump();
1670 __ bind(&result_ok);
1671 break;
1672 }
1673 case Token::SHL: {
1674 Label result_ok;
Steve Blockd0582a62009-12-15 09:54:21 +00001675 __ shl_cl(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001676 // Check that the *signed* result fits in a smi.
1677 __ cmp(answer.reg(), 0xc0000000);
1678 __ j(positive, &result_ok);
Leon Clarkee46be812010-01-19 14:06:41 +00001679 __ SmiTag(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001680 deferred->Jump();
1681 __ bind(&result_ok);
1682 break;
1683 }
1684 default:
1685 UNREACHABLE();
1686 }
1687 // Smi-tag the result in answer.
Leon Clarkee46be812010-01-19 14:06:41 +00001688 __ SmiTag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001689 deferred->BindExit();
1690 left->Unuse();
1691 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001692 ASSERT(answer.is_valid());
1693 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001694 }
1695
1696 // Handle the other binary operations.
1697 left->ToRegister();
1698 right->ToRegister();
1699 // A newly allocated register answer is used to hold the answer. The
1700 // registers containing left and right are not modified so they don't
1701 // need to be spilled in the fast case.
Leon Clarked91b9f72010-01-27 17:25:45 +00001702 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00001703 ASSERT(answer.is_valid());
1704
1705 // Perform the smi tag check.
1706 DeferredInlineBinaryOperation* deferred =
1707 new DeferredInlineBinaryOperation(op,
1708 answer.reg(),
1709 left->reg(),
1710 right->reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001711 left_type_info,
1712 right_type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001713 overwrite_mode);
Steve Block6ded16b2010-05-10 14:33:55 +01001714 CheckTwoForSminess(masm_, left->reg(), right->reg(), answer.reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001715 left_type_info, right_type_info, deferred);
Steve Block6ded16b2010-05-10 14:33:55 +01001716
Steve Blocka7e24c12009-10-30 11:49:00 +00001717 __ mov(answer.reg(), left->reg());
1718 switch (op) {
1719 case Token::ADD:
Leon Clarked91b9f72010-01-27 17:25:45 +00001720 __ add(answer.reg(), Operand(right->reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001721 deferred->Branch(overflow);
1722 break;
1723
1724 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00001725 __ sub(answer.reg(), Operand(right->reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001726 deferred->Branch(overflow);
1727 break;
1728
1729 case Token::MUL: {
1730 // If the smi tag is 0 we can just leave the tag on one operand.
1731 ASSERT(kSmiTag == 0); // Adjust code below if not the case.
1732 // Remove smi tag from the left operand (but keep sign).
1733 // Left-hand operand has been copied into answer.
Leon Clarkee46be812010-01-19 14:06:41 +00001734 __ SmiUntag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001735 // Do multiplication of smis, leaving result in answer.
1736 __ imul(answer.reg(), Operand(right->reg()));
1737 // Go slow on overflows.
1738 deferred->Branch(overflow);
1739 // Check for negative zero result. If product is zero, and one
1740 // argument is negative, go to slow case. The frame is unchanged
1741 // in this block, so local control flow can use a Label rather
1742 // than a JumpTarget.
Steve Block6ded16b2010-05-10 14:33:55 +01001743 if (!expr->no_negative_zero()) {
1744 Label non_zero_result;
1745 __ test(answer.reg(), Operand(answer.reg()));
1746 __ j(not_zero, &non_zero_result, taken);
1747 __ mov(answer.reg(), left->reg());
1748 __ or_(answer.reg(), Operand(right->reg()));
1749 deferred->Branch(negative);
1750 __ xor_(answer.reg(), Operand(answer.reg())); // Positive 0 is correct.
1751 __ bind(&non_zero_result);
1752 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001753 break;
1754 }
1755
1756 case Token::BIT_OR:
1757 __ or_(answer.reg(), Operand(right->reg()));
1758 break;
1759
1760 case Token::BIT_AND:
1761 __ and_(answer.reg(), Operand(right->reg()));
1762 break;
1763
1764 case Token::BIT_XOR:
1765 __ xor_(answer.reg(), Operand(right->reg()));
1766 break;
1767
1768 default:
1769 UNREACHABLE();
1770 break;
1771 }
1772 deferred->BindExit();
1773 left->Unuse();
1774 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001775 ASSERT(answer.is_valid());
1776 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001777}
1778
1779
1780// Call the appropriate binary operation stub to compute src op value
1781// and leave the result in dst.
1782class DeferredInlineSmiOperation: public DeferredCode {
1783 public:
1784 DeferredInlineSmiOperation(Token::Value op,
1785 Register dst,
1786 Register src,
Steve Block6ded16b2010-05-10 14:33:55 +01001787 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001788 Smi* value,
1789 OverwriteMode overwrite_mode)
1790 : op_(op),
1791 dst_(dst),
1792 src_(src),
Steve Block6ded16b2010-05-10 14:33:55 +01001793 type_info_(type_info),
Steve Blocka7e24c12009-10-30 11:49:00 +00001794 value_(value),
1795 overwrite_mode_(overwrite_mode) {
Steve Block6ded16b2010-05-10 14:33:55 +01001796 if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
Steve Blocka7e24c12009-10-30 11:49:00 +00001797 set_comment("[ DeferredInlineSmiOperation");
1798 }
1799
1800 virtual void Generate();
1801
1802 private:
1803 Token::Value op_;
1804 Register dst_;
1805 Register src_;
Steve Block6ded16b2010-05-10 14:33:55 +01001806 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001807 Smi* value_;
1808 OverwriteMode overwrite_mode_;
1809};
1810
1811
1812void DeferredInlineSmiOperation::Generate() {
Steve Blocka7e24c12009-10-30 11:49:00 +00001813 // For mod we don't generate all the Smi code inline.
1814 GenericBinaryOpStub stub(
1815 op_,
1816 overwrite_mode_,
Steve Block6ded16b2010-05-10 14:33:55 +01001817 (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
1818 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001819 stub.GenerateCall(masm_, src_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001820 if (!dst_.is(eax)) __ mov(dst_, eax);
1821}
1822
1823
1824// Call the appropriate binary operation stub to compute value op src
1825// and leave the result in dst.
1826class DeferredInlineSmiOperationReversed: public DeferredCode {
1827 public:
1828 DeferredInlineSmiOperationReversed(Token::Value op,
1829 Register dst,
1830 Smi* value,
1831 Register src,
Steve Block6ded16b2010-05-10 14:33:55 +01001832 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001833 OverwriteMode overwrite_mode)
1834 : op_(op),
1835 dst_(dst),
Steve Block6ded16b2010-05-10 14:33:55 +01001836 type_info_(type_info),
Steve Blocka7e24c12009-10-30 11:49:00 +00001837 value_(value),
1838 src_(src),
1839 overwrite_mode_(overwrite_mode) {
1840 set_comment("[ DeferredInlineSmiOperationReversed");
1841 }
1842
1843 virtual void Generate();
1844
1845 private:
1846 Token::Value op_;
1847 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001848 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001849 Smi* value_;
1850 Register src_;
1851 OverwriteMode overwrite_mode_;
1852};
1853
1854
1855void DeferredInlineSmiOperationReversed::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01001856 GenericBinaryOpStub igostub(
1857 op_,
1858 overwrite_mode_,
1859 NO_SMI_CODE_IN_STUB,
1860 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001861 igostub.GenerateCall(masm_, value_, src_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001862 if (!dst_.is(eax)) __ mov(dst_, eax);
1863}
1864
1865
1866// The result of src + value is in dst. It either overflowed or was not
1867// smi tagged. Undo the speculative addition and call the appropriate
1868// specialized stub for add. The result is left in dst.
1869class DeferredInlineSmiAdd: public DeferredCode {
1870 public:
1871 DeferredInlineSmiAdd(Register dst,
Steve Block6ded16b2010-05-10 14:33:55 +01001872 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001873 Smi* value,
1874 OverwriteMode overwrite_mode)
Steve Block6ded16b2010-05-10 14:33:55 +01001875 : dst_(dst),
1876 type_info_(type_info),
1877 value_(value),
1878 overwrite_mode_(overwrite_mode) {
1879 if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
Steve Blocka7e24c12009-10-30 11:49:00 +00001880 set_comment("[ DeferredInlineSmiAdd");
1881 }
1882
1883 virtual void Generate();
1884
1885 private:
1886 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001887 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001888 Smi* value_;
1889 OverwriteMode overwrite_mode_;
1890};
1891
1892
1893void DeferredInlineSmiAdd::Generate() {
1894 // Undo the optimistic add operation and call the shared stub.
1895 __ sub(Operand(dst_), Immediate(value_));
Steve Block6ded16b2010-05-10 14:33:55 +01001896 GenericBinaryOpStub igostub(
1897 Token::ADD,
1898 overwrite_mode_,
1899 NO_SMI_CODE_IN_STUB,
1900 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001901 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001902 if (!dst_.is(eax)) __ mov(dst_, eax);
1903}
1904
1905
1906// The result of value + src is in dst. It either overflowed or was not
1907// smi tagged. Undo the speculative addition and call the appropriate
1908// specialized stub for add. The result is left in dst.
1909class DeferredInlineSmiAddReversed: public DeferredCode {
1910 public:
1911 DeferredInlineSmiAddReversed(Register dst,
Steve Block6ded16b2010-05-10 14:33:55 +01001912 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001913 Smi* value,
1914 OverwriteMode overwrite_mode)
Steve Block6ded16b2010-05-10 14:33:55 +01001915 : dst_(dst),
1916 type_info_(type_info),
1917 value_(value),
1918 overwrite_mode_(overwrite_mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001919 set_comment("[ DeferredInlineSmiAddReversed");
1920 }
1921
1922 virtual void Generate();
1923
1924 private:
1925 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001926 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001927 Smi* value_;
1928 OverwriteMode overwrite_mode_;
1929};
1930
1931
1932void DeferredInlineSmiAddReversed::Generate() {
1933 // Undo the optimistic add operation and call the shared stub.
1934 __ sub(Operand(dst_), Immediate(value_));
Steve Block6ded16b2010-05-10 14:33:55 +01001935 GenericBinaryOpStub igostub(
1936 Token::ADD,
1937 overwrite_mode_,
1938 NO_SMI_CODE_IN_STUB,
1939 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001940 igostub.GenerateCall(masm_, value_, dst_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001941 if (!dst_.is(eax)) __ mov(dst_, eax);
1942}
1943
1944
1945// The result of src - value is in dst. It either overflowed or was not
1946// smi tagged. Undo the speculative subtraction and call the
1947// appropriate specialized stub for subtract. The result is left in
1948// dst.
1949class DeferredInlineSmiSub: public DeferredCode {
1950 public:
1951 DeferredInlineSmiSub(Register dst,
Steve Block6ded16b2010-05-10 14:33:55 +01001952 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001953 Smi* value,
1954 OverwriteMode overwrite_mode)
Steve Block6ded16b2010-05-10 14:33:55 +01001955 : dst_(dst),
1956 type_info_(type_info),
1957 value_(value),
1958 overwrite_mode_(overwrite_mode) {
1959 if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
Steve Blocka7e24c12009-10-30 11:49:00 +00001960 set_comment("[ DeferredInlineSmiSub");
1961 }
1962
1963 virtual void Generate();
1964
1965 private:
1966 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001967 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001968 Smi* value_;
1969 OverwriteMode overwrite_mode_;
1970};
1971
1972
1973void DeferredInlineSmiSub::Generate() {
1974 // Undo the optimistic sub operation and call the shared stub.
1975 __ add(Operand(dst_), Immediate(value_));
Steve Block6ded16b2010-05-10 14:33:55 +01001976 GenericBinaryOpStub igostub(
1977 Token::SUB,
1978 overwrite_mode_,
1979 NO_SMI_CODE_IN_STUB,
1980 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001981 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001982 if (!dst_.is(eax)) __ mov(dst_, eax);
1983}
1984
1985
Kristian Monsen25f61362010-05-21 11:50:48 +01001986Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
1987 Result* operand,
1988 Handle<Object> value,
1989 bool reversed,
1990 OverwriteMode overwrite_mode) {
1991 // Generate inline code for a binary operation when one of the
1992 // operands is a constant smi. Consumes the argument "operand".
Steve Blocka7e24c12009-10-30 11:49:00 +00001993 if (IsUnsafeSmi(value)) {
1994 Result unsafe_operand(value);
1995 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01001996 return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00001997 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001998 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01001999 return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002000 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002001 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002002 }
2003
2004 // Get the literal value.
2005 Smi* smi_value = Smi::cast(*value);
2006 int int_value = smi_value->value();
2007
Steve Block6ded16b2010-05-10 14:33:55 +01002008 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00002009 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00002010 switch (op) {
2011 case Token::ADD: {
2012 operand->ToRegister();
2013 frame_->Spill(operand->reg());
2014
2015 // Optimistically add. Call the specialized add stub if the
2016 // result is not a smi or overflows.
2017 DeferredCode* deferred = NULL;
2018 if (reversed) {
2019 deferred = new DeferredInlineSmiAddReversed(operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002020 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002021 smi_value,
2022 overwrite_mode);
2023 } else {
2024 deferred = new DeferredInlineSmiAdd(operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002025 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002026 smi_value,
2027 overwrite_mode);
2028 }
2029 __ add(Operand(operand->reg()), Immediate(value));
2030 deferred->Branch(overflow);
Steve Block6ded16b2010-05-10 14:33:55 +01002031 if (!operand->type_info().IsSmi()) {
2032 __ test(operand->reg(), Immediate(kSmiTagMask));
2033 deferred->Branch(not_zero);
2034 } else if (FLAG_debug_code) {
2035 __ AbortIfNotSmi(operand->reg());
2036 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002037 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002038 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002039 break;
2040 }
2041
2042 case Token::SUB: {
2043 DeferredCode* deferred = NULL;
Steve Blocka7e24c12009-10-30 11:49:00 +00002044 if (reversed) {
2045 // The reversed case is only hit when the right operand is not a
2046 // constant.
2047 ASSERT(operand->is_register());
2048 answer = allocator()->Allocate();
2049 ASSERT(answer.is_valid());
2050 __ Set(answer.reg(), Immediate(value));
Steve Block6ded16b2010-05-10 14:33:55 +01002051 deferred =
2052 new DeferredInlineSmiOperationReversed(op,
2053 answer.reg(),
2054 smi_value,
2055 operand->reg(),
2056 operand->type_info(),
2057 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002058 __ sub(answer.reg(), Operand(operand->reg()));
2059 } else {
2060 operand->ToRegister();
2061 frame_->Spill(operand->reg());
2062 answer = *operand;
2063 deferred = new DeferredInlineSmiSub(operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002064 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002065 smi_value,
2066 overwrite_mode);
2067 __ sub(Operand(operand->reg()), Immediate(value));
2068 }
2069 deferred->Branch(overflow);
Steve Block6ded16b2010-05-10 14:33:55 +01002070 if (!operand->type_info().IsSmi()) {
2071 __ test(answer.reg(), Immediate(kSmiTagMask));
2072 deferred->Branch(not_zero);
2073 } else if (FLAG_debug_code) {
2074 __ AbortIfNotSmi(operand->reg());
2075 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002076 deferred->BindExit();
2077 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00002078 break;
2079 }
2080
2081 case Token::SAR:
2082 if (reversed) {
2083 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01002084 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002085 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002086 } else {
2087 // Only the least significant 5 bits of the shift value are used.
2088 // In the slow case, this masking is done inside the runtime call.
2089 int shift_value = int_value & 0x1f;
2090 operand->ToRegister();
2091 frame_->Spill(operand->reg());
Steve Block6ded16b2010-05-10 14:33:55 +01002092 if (!operand->type_info().IsSmi()) {
2093 DeferredInlineSmiOperation* deferred =
2094 new DeferredInlineSmiOperation(op,
2095 operand->reg(),
2096 operand->reg(),
2097 operand->type_info(),
2098 smi_value,
2099 overwrite_mode);
2100 __ test(operand->reg(), Immediate(kSmiTagMask));
2101 deferred->Branch(not_zero);
2102 if (shift_value > 0) {
2103 __ sar(operand->reg(), shift_value);
2104 __ and_(operand->reg(), ~kSmiTagMask);
2105 }
2106 deferred->BindExit();
2107 } else {
2108 if (FLAG_debug_code) {
2109 __ AbortIfNotSmi(operand->reg());
2110 }
2111 if (shift_value > 0) {
2112 __ sar(operand->reg(), shift_value);
2113 __ and_(operand->reg(), ~kSmiTagMask);
2114 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002115 }
Leon Clarked91b9f72010-01-27 17:25:45 +00002116 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002117 }
2118 break;
2119
2120 case Token::SHR:
2121 if (reversed) {
2122 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01002123 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002124 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002125 } else {
2126 // Only the least significant 5 bits of the shift value are used.
2127 // In the slow case, this masking is done inside the runtime call.
2128 int shift_value = int_value & 0x1f;
2129 operand->ToRegister();
Leon Clarked91b9f72010-01-27 17:25:45 +00002130 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00002131 ASSERT(answer.is_valid());
2132 DeferredInlineSmiOperation* deferred =
2133 new DeferredInlineSmiOperation(op,
2134 answer.reg(),
2135 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002136 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002137 smi_value,
2138 overwrite_mode);
Steve Block6ded16b2010-05-10 14:33:55 +01002139 if (!operand->type_info().IsSmi()) {
2140 __ test(operand->reg(), Immediate(kSmiTagMask));
2141 deferred->Branch(not_zero);
2142 } else if (FLAG_debug_code) {
2143 __ AbortIfNotSmi(operand->reg());
2144 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002145 __ mov(answer.reg(), operand->reg());
Leon Clarkee46be812010-01-19 14:06:41 +00002146 __ SmiUntag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00002147 __ shr(answer.reg(), shift_value);
2148 // A negative Smi shifted right by two or more bits is in the positive Smi range.
2149 if (shift_value < 2) {
2150 __ test(answer.reg(), Immediate(0xc0000000));
2151 deferred->Branch(not_zero);
2152 }
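        // e.g. the untagged smi -2 (0xfffffffe) shifted right by one is
        // 0x7fffffff, which is above Smi::kMaxValue.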
2153 operand->Unuse();
Leon Clarkee46be812010-01-19 14:06:41 +00002154 __ SmiTag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00002155 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00002156 }
2157 break;
2158
2159 case Token::SHL:
2160 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002161 // Move operand into ecx and also into a second register.
2162 // If operand is already in a register, take advantage of that.
2163 // This lets us modify ecx, but still bail out to deferred code.
Leon Clarkee46be812010-01-19 14:06:41 +00002164 Result right;
2165 Result right_copy_in_ecx;
Steve Block6ded16b2010-05-10 14:33:55 +01002166 TypeInfo right_type_info = operand->type_info();
Leon Clarkee46be812010-01-19 14:06:41 +00002167 operand->ToRegister();
2168 if (operand->reg().is(ecx)) {
2169 right = allocator()->Allocate();
2170 __ mov(right.reg(), ecx);
2171 frame_->Spill(ecx);
2172 right_copy_in_ecx = *operand;
2173 } else {
2174 right_copy_in_ecx = allocator()->Allocate(ecx);
2175 __ mov(ecx, operand->reg());
2176 right = *operand;
2177 }
2178 operand->Unuse();
2179
Leon Clarked91b9f72010-01-27 17:25:45 +00002180 answer = allocator()->Allocate();
Leon Clarkee46be812010-01-19 14:06:41 +00002181 DeferredInlineSmiOperationReversed* deferred =
2182 new DeferredInlineSmiOperationReversed(op,
2183 answer.reg(),
2184 smi_value,
2185 right.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002186 right_type_info,
Leon Clarkee46be812010-01-19 14:06:41 +00002187 overwrite_mode);
2188 __ mov(answer.reg(), Immediate(int_value));
2189 __ sar(ecx, kSmiTagSize);
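        // sar shifts the tag bit into the carry flag, so carry set means the
        // value was not a smi.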
Steve Block6ded16b2010-05-10 14:33:55 +01002190 if (!right_type_info.IsSmi()) {
2191 deferred->Branch(carry);
2192 } else if (FLAG_debug_code) {
2193 __ AbortIfNotSmi(right.reg());
2194 }
Leon Clarkee46be812010-01-19 14:06:41 +00002195 __ shl_cl(answer.reg());
2196 __ cmp(answer.reg(), 0xc0000000);
2197 deferred->Branch(sign);
2198 __ SmiTag(answer.reg());
2199
2200 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00002201 } else {
2202 // Only the least significant 5 bits of the shift value are used.
2203 // In the slow case, this masking is done inside the runtime call.
2204 int shift_value = int_value & 0x1f;
2205 operand->ToRegister();
2206 if (shift_value == 0) {
2207 // Spill operand so it can be overwritten in the slow case.
2208 frame_->Spill(operand->reg());
2209 DeferredInlineSmiOperation* deferred =
2210 new DeferredInlineSmiOperation(op,
2211 operand->reg(),
2212 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002213 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002214 smi_value,
2215 overwrite_mode);
2216 __ test(operand->reg(), Immediate(kSmiTagMask));
2217 deferred->Branch(not_zero);
2218 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002219 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002220 } else {
2221 // Use a fresh temporary for nonzero shift values.
Leon Clarked91b9f72010-01-27 17:25:45 +00002222 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00002223 ASSERT(answer.is_valid());
2224 DeferredInlineSmiOperation* deferred =
2225 new DeferredInlineSmiOperation(op,
2226 answer.reg(),
2227 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002228 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002229 smi_value,
2230 overwrite_mode);
Steve Block6ded16b2010-05-10 14:33:55 +01002231 if (!operand->type_info().IsSmi()) {
2232 __ test(operand->reg(), Immediate(kSmiTagMask));
2233 deferred->Branch(not_zero);
2234 } else if (FLAG_debug_code) {
2235 __ AbortIfNotSmi(operand->reg());
2236 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002237 __ mov(answer.reg(), operand->reg());
2238 ASSERT(kSmiTag == 0); // adjust code if not the case
2239 // We do no shifts, only the Smi conversion, if shift_value is 1.
2240 if (shift_value > 1) {
2241 __ shl(answer.reg(), shift_value - 1);
2242 }
2243 // Convert int result to Smi, checking that it is in int range.
2244 ASSERT(kSmiTagSize == 1); // adjust code if not the case
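          // The final doubling applies the last shift step and smi-tags the
          // result; its overflow flag catches a result outside the smi range.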
2245 __ add(answer.reg(), Operand(answer.reg()));
2246 deferred->Branch(overflow);
2247 deferred->BindExit();
2248 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00002249 }
2250 }
2251 break;
2252
2253 case Token::BIT_OR:
2254 case Token::BIT_XOR:
2255 case Token::BIT_AND: {
2256 operand->ToRegister();
2257 frame_->Spill(operand->reg());
2258 DeferredCode* deferred = NULL;
2259 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002260 deferred =
2261 new DeferredInlineSmiOperationReversed(op,
2262 operand->reg(),
2263 smi_value,
2264 operand->reg(),
2265 operand->type_info(),
2266 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002267 } else {
2268 deferred = new DeferredInlineSmiOperation(op,
2269 operand->reg(),
2270 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002271 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002272 smi_value,
2273 overwrite_mode);
2274 }
Steve Block6ded16b2010-05-10 14:33:55 +01002275 if (!operand->type_info().IsSmi()) {
2276 __ test(operand->reg(), Immediate(kSmiTagMask));
2277 deferred->Branch(not_zero);
2278 } else if (FLAG_debug_code) {
2279 __ AbortIfNotSmi(operand->reg());
2280 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002281 if (op == Token::BIT_AND) {
2282 __ and_(Operand(operand->reg()), Immediate(value));
2283 } else if (op == Token::BIT_XOR) {
2284 if (int_value != 0) {
2285 __ xor_(Operand(operand->reg()), Immediate(value));
2286 }
2287 } else {
2288 ASSERT(op == Token::BIT_OR);
2289 if (int_value != 0) {
2290 __ or_(Operand(operand->reg()), Immediate(value));
2291 }
2292 }
2293 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002294 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002295 break;
2296 }
2297
Andrei Popescu402d9372010-02-26 13:31:12 +00002298 case Token::DIV:
2299 if (!reversed && int_value == 2) {
2300 operand->ToRegister();
2301 frame_->Spill(operand->reg());
2302
2303 DeferredInlineSmiOperation* deferred =
2304 new DeferredInlineSmiOperation(op,
2305 operand->reg(),
2306 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002307 operand->type_info(),
Andrei Popescu402d9372010-02-26 13:31:12 +00002308 smi_value,
2309 overwrite_mode);
2310 // Check that the lowest log2(value) bits of the operand are zero, and
2311 // test the smi tag at the same time.
2312 ASSERT_EQ(0, kSmiTag);
2313 ASSERT_EQ(1, kSmiTagSize);
2314 __ test(operand->reg(), Immediate(3));
2315 deferred->Branch(not_zero); // Branch if non-smi or odd smi.
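        // For an even tagged smi 2x, one arithmetic right shift yields x,
        // which is exactly the tagged representation of x / 2.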
2316 __ sar(operand->reg(), 1);
2317 deferred->BindExit();
2318 answer = *operand;
2319 } else {
2320 // Cannot fall through MOD to default case, so we duplicate the
2321 // default case here.
2322 Result constant_operand(value);
2323 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002324 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Andrei Popescu402d9372010-02-26 13:31:12 +00002325 overwrite_mode);
2326 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002327 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
Andrei Popescu402d9372010-02-26 13:31:12 +00002328 overwrite_mode);
2329 }
2330 }
2331 break;
Steve Block6ded16b2010-05-10 14:33:55 +01002332
Steve Blocka7e24c12009-10-30 11:49:00 +00002333 // Generate inline code for mod of powers of 2 and negative powers of 2.
2334 case Token::MOD:
2335 if (!reversed &&
2336 int_value != 0 &&
2337 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
2338 operand->ToRegister();
2339 frame_->Spill(operand->reg());
Steve Block6ded16b2010-05-10 14:33:55 +01002340 DeferredCode* deferred =
2341 new DeferredInlineSmiOperation(op,
2342 operand->reg(),
2343 operand->reg(),
2344 operand->type_info(),
2345 smi_value,
2346 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002347 // Check for negative or non-Smi left hand side.
Steve Block6ded16b2010-05-10 14:33:55 +01002348 __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00002349 deferred->Branch(not_zero);
2350 if (int_value < 0) int_value = -int_value;
2351 if (int_value == 1) {
2352 __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
2353 } else {
2354 __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
2355 }
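        // e.g. int_value == 4: the mask is (4 << 1) - 1 == 7, and for a
        // non-negative tagged smi 2x, 2x & 7 == 2 * (x & 3), the tagged
        // value of x % 4.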
2356 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002357 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002358 break;
2359 }
2360 // Fall through if we did not find a power of 2 on the right hand side!
2361
2362 default: {
2363 Result constant_operand(value);
2364 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002365 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002366 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002367 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002368 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002369 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002370 }
2371 break;
2372 }
2373 }
Leon Clarked91b9f72010-01-27 17:25:45 +00002374 ASSERT(answer.is_valid());
2375 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00002376}
2377
2378
Leon Clarkee46be812010-01-19 14:06:41 +00002379static bool CouldBeNaN(const Result& result) {
Steve Block6ded16b2010-05-10 14:33:55 +01002380 if (result.type_info().IsSmi()) return false;
2381 if (result.type_info().IsInteger32()) return false;
Leon Clarkee46be812010-01-19 14:06:41 +00002382 if (!result.is_constant()) return true;
2383 if (!result.handle()->IsHeapNumber()) return false;
2384 return isnan(HeapNumber::cast(*result.handle())->value());
2385}
2386
2387
Steve Block6ded16b2010-05-10 14:33:55 +01002388// Convert from signed to unsigned comparison to match the way EFLAGS are set
2389// by FPU and XMM compare instructions.
2390static Condition DoubleCondition(Condition cc) {
2391 switch (cc) {
2392 case less: return below;
2393 case equal: return equal;
2394 case less_equal: return below_equal;
2395 case greater: return above;
2396 case greater_equal: return above_equal;
2397 default: UNREACHABLE();
2398 }
2399 UNREACHABLE();
2400 return equal;
2401}
2402
2403
Leon Clarkee46be812010-01-19 14:06:41 +00002404void CodeGenerator::Comparison(AstNode* node,
2405 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00002406 bool strict,
2407 ControlDestination* dest) {
2408 // Strict only makes sense for equality comparisons.
2409 ASSERT(!strict || cc == equal);
2410
2411 Result left_side;
2412 Result right_side;
2413 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
2414 if (cc == greater || cc == less_equal) {
2415 cc = ReverseCondition(cc);
2416 left_side = frame_->Pop();
2417 right_side = frame_->Pop();
2418 } else {
2419 right_side = frame_->Pop();
2420 left_side = frame_->Pop();
2421 }
2422 ASSERT(cc == less || cc == equal || cc == greater_equal);
2423
Leon Clarkee46be812010-01-19 14:06:41 +00002424 // If either side is a constant of some sort, we can probably optimize the
2425 // comparison.
2426 bool left_side_constant_smi = false;
2427 bool left_side_constant_null = false;
2428 bool left_side_constant_1_char_string = false;
2429 if (left_side.is_constant()) {
2430 left_side_constant_smi = left_side.handle()->IsSmi();
2431 left_side_constant_null = left_side.handle()->IsNull();
2432 left_side_constant_1_char_string =
2433 (left_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002434 String::cast(*left_side.handle())->length() == 1 &&
2435 String::cast(*left_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002436 }
2437 bool right_side_constant_smi = false;
2438 bool right_side_constant_null = false;
2439 bool right_side_constant_1_char_string = false;
2440 if (right_side.is_constant()) {
2441 right_side_constant_smi = right_side.handle()->IsSmi();
2442 right_side_constant_null = right_side.handle()->IsNull();
2443 right_side_constant_1_char_string =
2444 (right_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002445 String::cast(*right_side.handle())->length() == 1 &&
2446 String::cast(*right_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002447 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002448
2449 if (left_side_constant_smi || right_side_constant_smi) {
2450 if (left_side_constant_smi && right_side_constant_smi) {
2451 // Trivial case, comparing two constants.
2452 int left_value = Smi::cast(*left_side.handle())->value();
2453 int right_value = Smi::cast(*right_side.handle())->value();
2454 switch (cc) {
2455 case less:
2456 dest->Goto(left_value < right_value);
2457 break;
2458 case equal:
2459 dest->Goto(left_value == right_value);
2460 break;
2461 case greater_equal:
2462 dest->Goto(left_value >= right_value);
2463 break;
2464 default:
2465 UNREACHABLE();
2466 }
Leon Clarkee46be812010-01-19 14:06:41 +00002467 } else {
2468 // Only one side is a constant Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00002469 // If left side is a constant Smi, reverse the operands.
2470 // Since one side is a constant Smi, conversion order does not matter.
2471 if (left_side_constant_smi) {
2472 Result temp = left_side;
2473 left_side = right_side;
2474 right_side = temp;
2475 cc = ReverseCondition(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01002476 // This may re-introduce greater or less_equal as the value of cc.
Steve Blocka7e24c12009-10-30 11:49:00 +00002477 // CompareStub and the inline code both support all values of cc.
2478 }
2479 // Implement comparison against a constant Smi, inlining the case
2480 // where both sides are Smis.
2481 left_side.ToRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00002482 Register left_reg = left_side.reg();
2483 Handle<Object> right_val = right_side.handle();
Steve Blocka7e24c12009-10-30 11:49:00 +00002484
2485 // Here we split control flow to the stub call and inlined cases
2486 // before finally splitting it to the control destination. We use
2487 // a jump target and branching to duplicate the virtual frame at
2488 // the first split. We manually handle the off-frame references
2489 // by reconstituting them on the non-fall-through path.
Steve Blocka7e24c12009-10-30 11:49:00 +00002490
Steve Block6ded16b2010-05-10 14:33:55 +01002491 if (left_side.is_smi()) {
Kristian Monsen25f61362010-05-21 11:50:48 +01002492 if (FLAG_debug_code) {
2493 __ AbortIfNotSmi(left_side.reg());
2494 }
Steve Block6ded16b2010-05-10 14:33:55 +01002495 } else {
2496 JumpTarget is_smi;
2497 __ test(left_side.reg(), Immediate(kSmiTagMask));
2498 is_smi.Branch(zero, taken);
2499
2500 bool is_loop_condition = (node->AsExpression() != NULL) &&
2501 node->AsExpression()->is_loop_condition();
2502 if (!is_loop_condition &&
2503 CpuFeatures::IsSupported(SSE2) &&
2504 right_val->IsSmi()) {
2505 // Right side is a constant smi and left side has been checked
2506 // not to be a smi.
2507 CpuFeatures::Scope use_sse2(SSE2);
2508 JumpTarget not_number;
2509 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
2510 Immediate(Factory::heap_number_map()));
2511 not_number.Branch(not_equal, &left_side);
2512 __ movdbl(xmm1,
2513 FieldOperand(left_reg, HeapNumber::kValueOffset));
2514 int value = Smi::cast(*right_val)->value();
2515 if (value == 0) {
2516 __ xorpd(xmm0, xmm0);
2517 } else {
2518 Result temp = allocator()->Allocate();
2519 __ mov(temp.reg(), Immediate(value));
2520 __ cvtsi2sd(xmm0, Operand(temp.reg()));
2521 temp.Unuse();
2522 }
Kristian Monsen25f61362010-05-21 11:50:48 +01002523 __ ucomisd(xmm1, xmm0);
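          // ucomisd sets the parity flag when the comparison is unordered,
          // i.e. when either operand is NaN.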
Steve Block6ded16b2010-05-10 14:33:55 +01002524 // Jump to builtin for NaN.
2525 not_number.Branch(parity_even, &left_side);
2526 left_side.Unuse();
2527 dest->true_target()->Branch(DoubleCondition(cc));
2528 dest->false_target()->Jump();
2529 not_number.Bind(&left_side);
Leon Clarkee46be812010-01-19 14:06:41 +00002530 }
Steve Block6ded16b2010-05-10 14:33:55 +01002531
2532 // Set up and call the compare stub.
2533 CompareStub stub(cc, strict, kCantBothBeNaN);
2534 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2535 result.ToRegister();
2536 __ cmp(result.reg(), 0);
2537 result.Unuse();
2538 dest->true_target()->Branch(cc);
Leon Clarkee46be812010-01-19 14:06:41 +00002539 dest->false_target()->Jump();
Steve Block6ded16b2010-05-10 14:33:55 +01002540
2541 is_smi.Bind();
Leon Clarkee46be812010-01-19 14:06:41 +00002542 }
2543
Steve Blocka7e24c12009-10-30 11:49:00 +00002544 left_side = Result(left_reg);
2545 right_side = Result(right_val);
2546 // Test smi equality and comparison by signed int comparison.
2547 if (IsUnsafeSmi(right_side.handle())) {
2548 right_side.ToRegister();
2549 __ cmp(left_side.reg(), Operand(right_side.reg()));
2550 } else {
2551 __ cmp(Operand(left_side.reg()), Immediate(right_side.handle()));
2552 }
2553 left_side.Unuse();
2554 right_side.Unuse();
2555 dest->Split(cc);
2556 }
Leon Clarkee46be812010-01-19 14:06:41 +00002557
Steve Blocka7e24c12009-10-30 11:49:00 +00002558 } else if (cc == equal &&
2559 (left_side_constant_null || right_side_constant_null)) {
2560 // To make null checks efficient, we check if either the left side or
2561 // the right side is the constant 'null'.
2562 // If so, we optimize the code by inlining a null check instead of
2563 // calling the (very) general runtime routine for checking equality.
2564 Result operand = left_side_constant_null ? right_side : left_side;
2565 right_side.Unuse();
2566 left_side.Unuse();
2567 operand.ToRegister();
2568 __ cmp(operand.reg(), Factory::null_value());
2569 if (strict) {
2570 operand.Unuse();
2571 dest->Split(equal);
2572 } else {
2573 // The 'null' value is only equal to 'undefined' if using non-strict
2574 // comparisons.
2575 dest->true_target()->Branch(equal);
2576 __ cmp(operand.reg(), Factory::undefined_value());
2577 dest->true_target()->Branch(equal);
2578 __ test(operand.reg(), Immediate(kSmiTagMask));
2579 dest->false_target()->Branch(equal);
2580
2581 // It can be an undetectable object.
2582 // Use a scratch register in preference to spilling operand.reg().
2583 Result temp = allocator()->Allocate();
2584 ASSERT(temp.is_valid());
2585 __ mov(temp.reg(),
2586 FieldOperand(operand.reg(), HeapObject::kMapOffset));
2587 __ movzx_b(temp.reg(),
2588 FieldOperand(temp.reg(), Map::kBitFieldOffset));
2589 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
2590 temp.Unuse();
2591 operand.Unuse();
2592 dest->Split(not_zero);
2593 }
Leon Clarkee46be812010-01-19 14:06:41 +00002594 } else if (left_side_constant_1_char_string ||
2595 right_side_constant_1_char_string) {
2596 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
2597 // Trivial case, comparing two constants.
2598 int left_value = String::cast(*left_side.handle())->Get(0);
2599 int right_value = String::cast(*right_side.handle())->Get(0);
2600 switch (cc) {
2601 case less:
2602 dest->Goto(left_value < right_value);
2603 break;
2604 case equal:
2605 dest->Goto(left_value == right_value);
2606 break;
2607 case greater_equal:
2608 dest->Goto(left_value >= right_value);
2609 break;
2610 default:
2611 UNREACHABLE();
2612 }
2613 } else {
2614 // Only one side is a constant 1 character string.
2615 // If left side is a constant 1-character string, reverse the operands.
2616 // Since one side is a constant string, conversion order does not matter.
2617 if (left_side_constant_1_char_string) {
2618 Result temp = left_side;
2619 left_side = right_side;
2620 right_side = temp;
2621 cc = ReverseCondition(cc);
2622 // This may reintroduce greater or less_equal as the value of cc.
2623 // CompareStub and the inline code both support all values of cc.
2624 }
2625 // Implement comparison against a constant string, inlining the case
2626 // where both sides are strings.
2627 left_side.ToRegister();
2628
2629 // Here we split control flow to the stub call and inlined cases
2630 // before finally splitting it to the control destination. We use
2631 // a jump target and branching to duplicate the virtual frame at
2632 // the first split. We manually handle the off-frame references
2633 // by reconstituting them on the non-fall-through path.
2634 JumpTarget is_not_string, is_string;
2635 Register left_reg = left_side.reg();
2636 Handle<Object> right_val = right_side.handle();
Steve Block6ded16b2010-05-10 14:33:55 +01002637 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
Leon Clarkee46be812010-01-19 14:06:41 +00002638 __ test(left_side.reg(), Immediate(kSmiTagMask));
2639 is_not_string.Branch(zero, &left_side);
2640 Result temp = allocator_->Allocate();
2641 ASSERT(temp.is_valid());
2642 __ mov(temp.reg(),
2643 FieldOperand(left_side.reg(), HeapObject::kMapOffset));
2644 __ movzx_b(temp.reg(),
2645 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
2646 // If we are testing for equality then make use of the symbol shortcut.
2647 // Check if the left hand side has the same type as the right hand
2648 // side (which is always a symbol).
2649 if (cc == equal) {
2650 Label not_a_symbol;
2651 ASSERT(kSymbolTag != 0);
2652 // Ensure that no non-strings have the symbol bit set.
2653 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
2654 __ test(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
2655 __ j(zero, &not_a_symbol);
2656 // They are symbols, so do identity compare.
2657 __ cmp(left_side.reg(), right_side.handle());
2658 dest->true_target()->Branch(equal);
2659 dest->false_target()->Branch(not_equal);
2660 __ bind(&not_a_symbol);
2661 }
Steve Block6ded16b2010-05-10 14:33:55 +01002662 // Call the compare stub if the left side is not a flat ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002663 __ and_(temp.reg(),
2664 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2665 __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
2666 temp.Unuse();
2667 is_string.Branch(equal, &left_side);
2668
2669 // Set up and call the compare stub.
2670 is_not_string.Bind(&left_side);
2671 CompareStub stub(cc, strict, kCantBothBeNaN);
2672 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2673 result.ToRegister();
2674 __ cmp(result.reg(), 0);
2675 result.Unuse();
2676 dest->true_target()->Branch(cc);
2677 dest->false_target()->Jump();
2678
2679 is_string.Bind(&left_side);
Steve Block6ded16b2010-05-10 14:33:55 +01002680 // left_side is a sequential ASCII string.
Leon Clarkee46be812010-01-19 14:06:41 +00002681 left_side = Result(left_reg);
2682 right_side = Result(right_val);
2683 Result temp2 = allocator_->Allocate();
2684 ASSERT(temp2.is_valid());
2685 // Test string equality and comparison.
2686 if (cc == equal) {
2687 Label comparison_done;
2688 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002689 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002690 __ j(not_equal, &comparison_done);
2691 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002692 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Leon Clarkee46be812010-01-19 14:06:41 +00002693 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2694 char_value);
2695 __ bind(&comparison_done);
2696 } else {
2697 __ mov(temp2.reg(),
2698 FieldOperand(left_side.reg(), String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01002699 __ SmiUntag(temp2.reg());
Leon Clarkee46be812010-01-19 14:06:41 +00002700 __ sub(Operand(temp2.reg()), Immediate(1));
2701 Label comparison;
Steve Block6ded16b2010-05-10 14:33:55 +01002702 // If the length is 0 then the subtraction gave -1 which compares less
Leon Clarkee46be812010-01-19 14:06:41 +00002703 // than any character.
2704 __ j(negative, &comparison);
2705 // Otherwise load the first character.
2706 __ movzx_b(temp2.reg(),
2707 FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize));
2708 __ bind(&comparison);
Steve Block6ded16b2010-05-10 14:33:55 +01002709 // Compare the first character of the string with the
2710 // constant 1-character string.
Leon Clarkee46be812010-01-19 14:06:41 +00002711 uint8_t char_value =
Steve Block6ded16b2010-05-10 14:33:55 +01002712 static_cast<uint8_t>(String::cast(*right_val)->Get(0));
Leon Clarkee46be812010-01-19 14:06:41 +00002713 __ cmp(Operand(temp2.reg()), Immediate(char_value));
2714 Label characters_were_different;
2715 __ j(not_equal, &characters_were_different);
2716 // If the first character is the same then the long string sorts after
2717 // the short one.
2718 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
Steve Block6ded16b2010-05-10 14:33:55 +01002719 Immediate(Smi::FromInt(1)));
Leon Clarkee46be812010-01-19 14:06:41 +00002720 __ bind(&characters_were_different);
2721 }
2722 temp2.Unuse();
2723 left_side.Unuse();
2724 right_side.Unuse();
2725 dest->Split(cc);
2726 }
2727 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002728 // Neither side is a constant Smi, constant 1-char string or constant null.
2729    // If either side is a non-smi constant or known to be a heap number, skip
2730 // the smi check.
Steve Blocka7e24c12009-10-30 11:49:00 +00002731 bool known_non_smi =
2732 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
Steve Block6ded16b2010-05-10 14:33:55 +01002733 (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
2734 left_side.type_info().IsDouble() ||
2735 right_side.type_info().IsDouble();
Leon Clarkee46be812010-01-19 14:06:41 +00002736 NaNInformation nan_info =
2737 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
2738 kBothCouldBeNaN :
2739 kCantBothBeNaN;
Steve Block6ded16b2010-05-10 14:33:55 +01002740
2741    // Inline number comparison, handling any combination of smis and heap
2742    // numbers, if:
2743    //  - the code is in a loop,
2744    //  - the compare operation is different from equal, and
2745    //  - the compare is not a for-loop condition.
2746    // The reason for excluding equal is that it will most likely be done
2747    // with smis (not heap numbers) and the code for comparing smis is inlined
2748    // separately. The same reasoning applies to for-loop conditions, which
2749    // will also most likely be smi comparisons.
2750 bool is_loop_condition = (node->AsExpression() != NULL)
2751 && node->AsExpression()->is_loop_condition();
2752 bool inline_number_compare =
2753 loop_nesting() > 0 && cc != equal && !is_loop_condition;
2754
2755 // Left and right needed in registers for the following code.
Steve Blocka7e24c12009-10-30 11:49:00 +00002756 left_side.ToRegister();
2757 right_side.ToRegister();
2758
2759 if (known_non_smi) {
Steve Block6ded16b2010-05-10 14:33:55 +01002760      // Inline the equality check if neither operand can be NaN. If both
2761      // objects are the same, they are equal.
2762 if (nan_info == kCantBothBeNaN && cc == equal) {
2763 __ cmp(left_side.reg(), Operand(right_side.reg()));
2764 dest->true_target()->Branch(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00002765 }
Steve Block6ded16b2010-05-10 14:33:55 +01002766
2767 // Inline number comparison.
2768 if (inline_number_compare) {
2769 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2770 }
2771
2772 // End of in-line compare, call out to the compare stub. Don't include
2773 // number comparison in the stub if it was inlined.
2774 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
2775 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2776 __ test(answer.reg(), Operand(answer.reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002777 answer.Unuse();
2778 dest->Split(cc);
2779 } else {
2780 // Here we split control flow to the stub call and inlined cases
2781 // before finally splitting it to the control destination. We use
2782 // a jump target and branching to duplicate the virtual frame at
2783 // the first split. We manually handle the off-frame references
2784 // by reconstituting them on the non-fall-through path.
2785 JumpTarget is_smi;
2786 Register left_reg = left_side.reg();
2787 Register right_reg = right_side.reg();
2788
Steve Block6ded16b2010-05-10 14:33:55 +01002789 // In-line check for comparing two smis.
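      // The bitwise OR of two smis still has the smi tag bits clear, so a
      // single test of kSmiTagMask on (left | right) checks both operands
      // at once.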
Steve Blocka7e24c12009-10-30 11:49:00 +00002790 Result temp = allocator_->Allocate();
2791 ASSERT(temp.is_valid());
2792 __ mov(temp.reg(), left_side.reg());
2793 __ or_(temp.reg(), Operand(right_side.reg()));
2794 __ test(temp.reg(), Immediate(kSmiTagMask));
2795 temp.Unuse();
2796 is_smi.Branch(zero, taken);
Steve Block6ded16b2010-05-10 14:33:55 +01002797
2798 // Inline the equality check if both operands can't be a NaN. If both
2799 // objects are the same they are equal.
2800 if (nan_info == kCantBothBeNaN && cc == equal) {
2801 __ cmp(left_side.reg(), Operand(right_side.reg()));
2802 dest->true_target()->Branch(equal);
2803 }
2804
2805 // Inline number comparison.
2806 if (inline_number_compare) {
2807 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
2808 }
2809
2810 // End of in-line compare, call out to the compare stub. Don't include
2811 // number comparison in the stub if it was inlined.
2812 CompareStub stub(cc, strict, nan_info, !inline_number_compare);
Steve Blocka7e24c12009-10-30 11:49:00 +00002813 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
Kristian Monsen25f61362010-05-21 11:50:48 +01002814 __ test(answer.reg(), Operand(answer.reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00002815 answer.Unuse();
2816 dest->true_target()->Branch(cc);
2817 dest->false_target()->Jump();
2818
2819 is_smi.Bind();
2820 left_side = Result(left_reg);
2821 right_side = Result(right_reg);
2822 __ cmp(left_side.reg(), Operand(right_side.reg()));
2823 right_side.Unuse();
2824 left_side.Unuse();
2825 dest->Split(cc);
2826 }
2827 }
2828}
2829
2830
Steve Block6ded16b2010-05-10 14:33:55 +01002831// Check that the comparison operand is a number. Jump to the not_numbers jump
2832// target, passing the left and right results, if the operand is not a number.
2833static void CheckComparisonOperand(MacroAssembler* masm_,
2834 Result* operand,
2835 Result* left_side,
2836 Result* right_side,
2837 JumpTarget* not_numbers) {
2838 // Perform check if operand is not known to be a number.
2839 if (!operand->type_info().IsNumber()) {
2840 Label done;
2841 __ test(operand->reg(), Immediate(kSmiTagMask));
2842 __ j(zero, &done);
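    // A smi is trivially a number, so only heap objects reach the map check.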
2843 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
2844 Immediate(Factory::heap_number_map()));
2845 not_numbers->Branch(not_equal, left_side, right_side, not_taken);
2846 __ bind(&done);
2847 }
2848}
2849
2850
2851 // Load a comparison operand onto the FPU stack. This assumes that the
2852 // operand has already been checked and is a number.
2853static void LoadComparisonOperand(MacroAssembler* masm_,
2854 Result* operand) {
2855 Label done;
2856 if (operand->type_info().IsDouble()) {
2857 // Operand is known to be a heap number, just load it.
2858 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
2859 } else if (operand->type_info().IsSmi()) {
2860 // Operand is known to be a smi. Convert it to double and keep the original
2861 // smi.
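    // There is no instruction for loading an x87 register directly from a
    // general-purpose register, so the untagged value makes a round trip
    // through memory: push, fild_s, pop.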
2862 __ SmiUntag(operand->reg());
2863 __ push(operand->reg());
2864 __ fild_s(Operand(esp, 0));
2865 __ pop(operand->reg());
2866 __ SmiTag(operand->reg());
2867 } else {
2868    // Operand type not known: check for a smi, otherwise assume a heap number.
2869 Label smi;
2870 __ test(operand->reg(), Immediate(kSmiTagMask));
2871 __ j(zero, &smi);
2872 __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
2873 __ jmp(&done);
2874 __ bind(&smi);
2875 __ SmiUntag(operand->reg());
2876 __ push(operand->reg());
2877 __ fild_s(Operand(esp, 0));
2878 __ pop(operand->reg());
2879 __ SmiTag(operand->reg());
2880 __ jmp(&done);
2881 }
2882 __ bind(&done);
2883}
2884
2885
2886 // Load a comparison operand into an XMM register. Jump to the not_numbers
2887 // jump target, passing the left and right results, if it is not a number.
2888static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
2889 Result* operand,
2890 XMMRegister reg,
2891 Result* left_side,
2892 Result* right_side,
2893 JumpTarget* not_numbers) {
2894 Label done;
2895 if (operand->type_info().IsDouble()) {
2896 // Operand is known to be a heap number, just load it.
2897 __ movdbl(reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
2898 } else if (operand->type_info().IsSmi()) {
2899 // Operand is known to be a smi. Convert it to double and keep the original
2900 // smi.
2901 __ SmiUntag(operand->reg());
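    // Unlike the x87 path, cvtsi2sd converts directly from a general-purpose
    // register, so no round trip through memory is needed.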
2902 __ cvtsi2sd(reg, Operand(operand->reg()));
2903 __ SmiTag(operand->reg());
2904 } else {
2905 // Operand type not known, check for smi or heap number.
2906 Label smi;
2907 __ test(operand->reg(), Immediate(kSmiTagMask));
2908 __ j(zero, &smi);
2909 if (!operand->type_info().IsNumber()) {
2910 __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
2911 Immediate(Factory::heap_number_map()));
2912 not_numbers->Branch(not_equal, left_side, right_side, taken);
2913 }
2914 __ movdbl(reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
2915 __ jmp(&done);
2916
2917 __ bind(&smi);
2918    // Convert the smi to a double and keep the original smi.
2919 __ SmiUntag(operand->reg());
2920 __ cvtsi2sd(reg, Operand(operand->reg()));
2921 __ SmiTag(operand->reg());
2922 __ jmp(&done);
2923 }
2924 __ bind(&done);
2925}
2926
2927
2928void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
2929 Result* right_side,
2930 Condition cc,
2931 ControlDestination* dest) {
2932 ASSERT(left_side->is_register());
2933 ASSERT(right_side->is_register());
2934
2935 JumpTarget not_numbers;
2936 if (CpuFeatures::IsSupported(SSE2)) {
2937 CpuFeatures::Scope use_sse2(SSE2);
2938
2939    // Load the left and right operands into xmm0 and xmm1 and compare.
2940 LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
2941 &not_numbers);
2942 LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
2943 &not_numbers);
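    // comisd compares the two doubles and sets the parity flag if either
    // operand is NaN (unordered result); the NaN bailout below tests it.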
2944 __ comisd(xmm0, xmm1);
2945 } else {
2946 Label check_right, compare;
2947
2948 // Make sure that both comparison operands are numbers.
2949 CheckComparisonOperand(masm_, left_side, left_side, right_side,
2950 &not_numbers);
2951 CheckComparisonOperand(masm_, right_side, left_side, right_side,
2952 &not_numbers);
2953
2954    // Load the right and left operands onto the FPU stack and compare.
2955 LoadComparisonOperand(masm_, right_side);
2956 LoadComparisonOperand(masm_, left_side);
2957 __ FCmp();
2958 }
2959
2960 // Bail out if a NaN is involved.
2961 not_numbers.Branch(parity_even, left_side, right_side, not_taken);
2962
2963 // Split to destination targets based on comparison.
2964 left_side->Unuse();
2965 right_side->Unuse();
2966 dest->true_target()->Branch(DoubleCondition(cc));
2967 dest->false_target()->Jump();
2968
2969 not_numbers.Bind(left_side, right_side);
2970}
2971
2972
Steve Blocka7e24c12009-10-30 11:49:00 +00002973// Call the function just below TOS on the stack with the given
2974// arguments. The receiver is the TOS.
2975void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00002976 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00002977 int position) {
2978 // Push the arguments ("left-to-right") on the stack.
2979 int arg_count = args->length();
2980 for (int i = 0; i < arg_count; i++) {
2981 Load(args->at(i));
2982 }
2983
2984 // Record the position for debugging purposes.
2985 CodeForSourcePosition(position);
2986
2987 // Use the shared code stub to call the function.
2988 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00002989 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002990 Result answer = frame_->CallStub(&call_function, arg_count + 1);
2991 // Restore context and replace function on the stack with the
2992 // result of the stub invocation.
2993 frame_->RestoreContextRegister();
2994 frame_->SetElementAt(0, &answer);
2995}
2996
2997
Leon Clarked91b9f72010-01-27 17:25:45 +00002998void CodeGenerator::CallApplyLazy(Expression* applicand,
Steve Blocka7e24c12009-10-30 11:49:00 +00002999 Expression* receiver,
3000 VariableProxy* arguments,
3001 int position) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003002 // An optimized implementation of expressions of the form
3003 // x.apply(y, arguments).
3004 // If the arguments object of the scope has not been allocated,
3005 // and x.apply is Function.prototype.apply, this optimization
3006 // just copies y and the arguments of the current function on the
3007 // stack, as receiver and arguments, and calls x.
3008 // In the implementation comments, we call x the applicand
3009 // and y the receiver.
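  // For example, in
  //   function f() { return g.apply(this, arguments); }
  // the call to g receives f's receiver and arguments directly from the
  // stack, without ever allocating an arguments object.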
Steve Blocka7e24c12009-10-30 11:49:00 +00003010 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
3011 ASSERT(arguments->IsArguments());
3012
Leon Clarked91b9f72010-01-27 17:25:45 +00003013 // Load applicand.apply onto the stack. This will usually
Steve Blocka7e24c12009-10-30 11:49:00 +00003014 // give us a megamorphic load site. Not super, but it works.
Leon Clarked91b9f72010-01-27 17:25:45 +00003015 Load(applicand);
Andrei Popescu402d9372010-02-26 13:31:12 +00003016 frame()->Dup();
Leon Clarked91b9f72010-01-27 17:25:45 +00003017 Handle<String> name = Factory::LookupAsciiSymbol("apply");
3018 frame()->Push(name);
3019 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
3020 __ nop();
3021 frame()->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00003022
3023 // Load the receiver and the existing arguments object onto the
3024 // expression stack. Avoid allocating the arguments object here.
3025 Load(receiver);
Andrei Popescu402d9372010-02-26 13:31:12 +00003026 Result existing_args =
3027 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
3028 frame()->Push(&existing_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003029
3030 // Emit the source position information after having loaded the
3031 // receiver and the arguments.
3032 CodeForSourcePosition(position);
Leon Clarked91b9f72010-01-27 17:25:45 +00003033 // Contents of frame at this point:
3034 // Frame[0]: arguments object of the current function or the hole.
3035 // Frame[1]: receiver
3036 // Frame[2]: applicand.apply
3037 // Frame[3]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003038
3039 // Check if the arguments object has been lazily allocated
3040 // already. If so, just use that instead of copying the arguments
3041 // from the stack. This also deals with cases where a local variable
3042 // named 'arguments' has been introduced.
3043 frame_->Dup();
3044 Result probe = frame_->Pop();
Leon Clarked91b9f72010-01-27 17:25:45 +00003045 { VirtualFrame::SpilledScope spilled_scope;
3046 Label slow, done;
3047 bool try_lazy = true;
3048 if (probe.is_constant()) {
3049 try_lazy = probe.handle()->IsTheHole();
3050 } else {
3051 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
3052 probe.Unuse();
3053 __ j(not_equal, &slow);
3054 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003055
Leon Clarked91b9f72010-01-27 17:25:45 +00003056 if (try_lazy) {
3057 Label build_args;
3058 // Get rid of the arguments object probe.
3059 frame_->Drop(); // Can be called on a spilled frame.
3060 // Stack now has 3 elements on it.
3061 // Contents of stack at this point:
3062 // esp[0]: receiver
3063 // esp[1]: applicand.apply
3064 // esp[2]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00003065
Leon Clarked91b9f72010-01-27 17:25:45 +00003066 // Check that the receiver really is a JavaScript object.
3067 __ mov(eax, Operand(esp, 0));
3068 __ test(eax, Immediate(kSmiTagMask));
3069 __ j(zero, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003070 // We allow all JSObjects including JSFunctions. As long as
3071 // JS_FUNCTION_TYPE is the last instance type and it is right
3072 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
3073 // bound.
3074 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
3075 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Leon Clarked91b9f72010-01-27 17:25:45 +00003076 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
3077 __ j(below, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00003078
Leon Clarked91b9f72010-01-27 17:25:45 +00003079 // Check that applicand.apply is Function.prototype.apply.
3080 __ mov(eax, Operand(esp, kPointerSize));
3081 __ test(eax, Immediate(kSmiTagMask));
3082 __ j(zero, &build_args);
3083 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
3084 __ j(not_equal, &build_args);
3085 __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003086 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Leon Clarked91b9f72010-01-27 17:25:45 +00003087 __ cmp(FieldOperand(ecx, SharedFunctionInfo::kCodeOffset),
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003088 Immediate(apply_code));
Leon Clarked91b9f72010-01-27 17:25:45 +00003089 __ j(not_equal, &build_args);
3090
3091 // Check that applicand is a function.
3092 __ mov(edi, Operand(esp, 2 * kPointerSize));
3093 __ test(edi, Immediate(kSmiTagMask));
3094 __ j(zero, &build_args);
3095 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
3096 __ j(not_equal, &build_args);
3097
3098      // Copy the arguments passed to this function, possibly from the
3099 // adaptor frame below it.
3100 Label invoke, adapted;
3101 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3102 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
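      // Arguments adaptor frames mark themselves with the sentinel
      // Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR) in the context slot.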
3103 __ cmp(Operand(ecx),
3104 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3105 __ j(equal, &adapted);
3106
3107 // No arguments adaptor frame. Copy fixed number of arguments.
Andrei Popescu31002712010-02-23 13:46:05 +00003108 __ mov(eax, Immediate(scope()->num_parameters()));
3109 for (int i = 0; i < scope()->num_parameters(); i++) {
Leon Clarked91b9f72010-01-27 17:25:45 +00003110 __ push(frame_->ParameterAt(i));
3111 }
3112 __ jmp(&invoke);
3113
3114 // Arguments adaptor frame present. Copy arguments from there, but
3115 // avoid copying too many arguments to avoid stack overflows.
3116 __ bind(&adapted);
3117 static const uint32_t kArgumentsLimit = 1 * KB;
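      // The limit allows at most 1024 arguments to be copied inline;
      // larger counts fall back to allocating the arguments object.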
3118 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3119 __ SmiUntag(eax);
3120 __ mov(ecx, Operand(eax));
3121 __ cmp(eax, kArgumentsLimit);
3122 __ j(above, &build_args);
3123
3124 // Loop through the arguments pushing them onto the execution
3125 // stack. We don't inform the virtual frame of the push, so we don't
3126 // have to worry about getting rid of the elements from the virtual
3127 // frame.
3128 Label loop;
3129 // ecx is a small non-negative integer, due to the test above.
3130 __ test(ecx, Operand(ecx));
3131 __ j(zero, &invoke);
3132 __ bind(&loop);
3133 __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
3134 __ dec(ecx);
3135 __ j(not_zero, &loop);
3136
3137 // Invoke the function.
3138 __ bind(&invoke);
3139 ParameterCount actual(eax);
3140 __ InvokeFunction(edi, actual, CALL_FUNCTION);
3141 // Drop applicand.apply and applicand from the stack, and push
3142 // the result of the function call, but leave the spilled frame
3143 // unchanged, with 3 elements, so it is correct when we compile the
3144 // slow-case code.
3145 __ add(Operand(esp), Immediate(2 * kPointerSize));
3146 __ push(eax);
3147 // Stack now has 1 element:
3148 // esp[0]: result
3149 __ jmp(&done);
3150
3151 // Slow-case: Allocate the arguments object since we know it isn't
3152      // there, and fall through to the slow case where we call
3153 // applicand.apply.
3154 __ bind(&build_args);
3155      // Stack now has 3 elements, because we jumped here from a point where:
3156 // esp[0]: receiver
3157 // esp[1]: applicand.apply
3158 // esp[2]: applicand.
3159
3160 // StoreArgumentsObject requires a correct frame, and may modify it.
3161 Result arguments_object = StoreArgumentsObject(false);
3162 frame_->SpillAll();
3163 arguments_object.ToRegister();
3164 frame_->EmitPush(arguments_object.reg());
3165 arguments_object.Unuse();
3166 // Stack and frame now have 4 elements.
3167 __ bind(&slow);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003168 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003169
Leon Clarked91b9f72010-01-27 17:25:45 +00003170 // Generic computation of x.apply(y, args) with no special optimization.
3171 // Flip applicand.apply and applicand on the stack, so
3172 // applicand looks like the receiver of the applicand.apply call.
3173 // Then process it as a normal function call.
3174 __ mov(eax, Operand(esp, 3 * kPointerSize));
3175 __ mov(ebx, Operand(esp, 2 * kPointerSize));
3176 __ mov(Operand(esp, 2 * kPointerSize), eax);
3177 __ mov(Operand(esp, 3 * kPointerSize), ebx);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00003178
Leon Clarked91b9f72010-01-27 17:25:45 +00003179 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
3180 Result res = frame_->CallStub(&call_function, 3);
3181 // The function and its two arguments have been dropped.
3182 frame_->Drop(1); // Drop the receiver as well.
3183 res.ToRegister();
3184 frame_->EmitPush(res.reg());
3185 // Stack now has 1 element:
3186 // esp[0]: result
3187 if (try_lazy) __ bind(&done);
3188 } // End of spilled scope.
3189 // Restore the context register after a call.
Steve Blocka7e24c12009-10-30 11:49:00 +00003190 frame_->RestoreContextRegister();
3191}
3192
3193
3194class DeferredStackCheck: public DeferredCode {
3195 public:
3196 DeferredStackCheck() {
3197 set_comment("[ DeferredStackCheck");
3198 }
3199
3200 virtual void Generate();
3201};
3202
3203
3204void DeferredStackCheck::Generate() {
3205 StackCheckStub stub;
3206 __ CallStub(&stub);
3207}
3208
3209
3210void CodeGenerator::CheckStack() {
Steve Blockd0582a62009-12-15 09:54:21 +00003211 DeferredStackCheck* deferred = new DeferredStackCheck;
3212 ExternalReference stack_limit =
3213 ExternalReference::address_of_stack_limit();
3214 __ cmp(esp, Operand::StaticVariable(stack_limit));
3215 deferred->Branch(below);
3216 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00003217}
3218
3219
3220void CodeGenerator::VisitAndSpill(Statement* statement) {
3221 ASSERT(in_spilled_code());
3222 set_in_spilled_code(false);
3223 Visit(statement);
3224 if (frame_ != NULL) {
3225 frame_->SpillAll();
3226 }
3227 set_in_spilled_code(true);
3228}
3229
3230
3231void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
3232 ASSERT(in_spilled_code());
3233 set_in_spilled_code(false);
3234 VisitStatements(statements);
3235 if (frame_ != NULL) {
3236 frame_->SpillAll();
3237 }
3238 set_in_spilled_code(true);
3239}
3240
3241
3242void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
3243 ASSERT(!in_spilled_code());
3244 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
3245 Visit(statements->at(i));
3246 }
3247}
3248
3249
3250void CodeGenerator::VisitBlock(Block* node) {
3251 ASSERT(!in_spilled_code());
3252 Comment cmnt(masm_, "[ Block");
3253 CodeForStatementPosition(node);
3254 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3255 VisitStatements(node->statements());
3256 if (node->break_target()->is_linked()) {
3257 node->break_target()->Bind();
3258 }
3259 node->break_target()->Unuse();
3260}
3261
3262
3263void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
3264 // Call the runtime to declare the globals. The inevitable call
3265 // will sync frame elements to memory anyway, so we do it eagerly to
3266 // allow us to push the arguments directly into place.
3267 frame_->SyncRange(0, frame_->element_count() - 1);
3268
Steve Block3ce2e202009-11-05 08:53:23 +00003269 frame_->EmitPush(esi); // The context is the first argument.
Steve Blocka7e24c12009-10-30 11:49:00 +00003270 frame_->EmitPush(Immediate(pairs));
Steve Blocka7e24c12009-10-30 11:49:00 +00003271 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
3272 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
3273 // Return value is ignored.
3274}
3275
3276
3277void CodeGenerator::VisitDeclaration(Declaration* node) {
3278 Comment cmnt(masm_, "[ Declaration");
3279 Variable* var = node->proxy()->var();
3280 ASSERT(var != NULL); // must have been resolved
3281 Slot* slot = var->slot();
3282
3283 // If it was not possible to allocate the variable at compile time,
3284 // we need to "declare" it at runtime to make sure it actually
3285 // exists in the local context.
3286 if (slot != NULL && slot->type() == Slot::LOOKUP) {
3287 // Variables with a "LOOKUP" slot were introduced as non-locals
3288 // during variable resolution and must have mode DYNAMIC.
3289 ASSERT(var->is_dynamic());
3290 // For now, just do a runtime call. Sync the virtual frame eagerly
3291 // so we can simply push the arguments into place.
3292 frame_->SyncRange(0, frame_->element_count() - 1);
3293 frame_->EmitPush(esi);
3294 frame_->EmitPush(Immediate(var->name()));
3295 // Declaration nodes are always introduced in one of two modes.
3296 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
3297 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
3298 frame_->EmitPush(Immediate(Smi::FromInt(attr)));
3299 // Push initial value, if any.
3300 // Note: For variables we must not push an initial value (such as
3301 // 'undefined') because we may have a (legal) redeclaration and we
3302 // must not destroy the current value.
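    // For example, after "var x = 1; var x;" the second declaration must
    // leave x equal to 1.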
3303 if (node->mode() == Variable::CONST) {
3304 frame_->EmitPush(Immediate(Factory::the_hole_value()));
3305 } else if (node->fun() != NULL) {
3306 Load(node->fun());
3307 } else {
3308 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
3309 }
3310 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
3311 // Ignore the return value (declarations are statements).
3312 return;
3313 }
3314
3315 ASSERT(!var->is_global());
3316
3317 // If we have a function or a constant, we need to initialize the variable.
3318 Expression* val = NULL;
3319 if (node->mode() == Variable::CONST) {
3320 val = new Literal(Factory::the_hole_value());
3321 } else {
3322 val = node->fun(); // NULL if we don't have a function
3323 }
3324
3325 if (val != NULL) {
3326 {
3327 // Set the initial value.
3328 Reference target(this, node->proxy());
3329 Load(val);
3330 target.SetValue(NOT_CONST_INIT);
3331 // The reference is removed from the stack (preserving TOS) when
3332 // it goes out of scope.
3333 }
3334 // Get rid of the assigned value (declarations are statements).
3335 frame_->Drop();
3336 }
3337}
3338
3339
3340void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
3341 ASSERT(!in_spilled_code());
3342 Comment cmnt(masm_, "[ ExpressionStatement");
3343 CodeForStatementPosition(node);
3344 Expression* expression = node->expression();
3345 expression->MarkAsStatement();
3346 Load(expression);
3347 // Remove the lingering expression result from the top of stack.
3348 frame_->Drop();
3349}
3350
3351
3352void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
3353 ASSERT(!in_spilled_code());
3354 Comment cmnt(masm_, "// EmptyStatement");
3355 CodeForStatementPosition(node);
3356 // nothing to do
3357}
3358
3359
3360void CodeGenerator::VisitIfStatement(IfStatement* node) {
3361 ASSERT(!in_spilled_code());
3362 Comment cmnt(masm_, "[ IfStatement");
3363 // Generate different code depending on which parts of the if statement
3364 // are present or not.
3365 bool has_then_stm = node->HasThenStatement();
3366 bool has_else_stm = node->HasElseStatement();
3367
3368 CodeForStatementPosition(node);
3369 JumpTarget exit;
3370 if (has_then_stm && has_else_stm) {
3371 JumpTarget then;
3372 JumpTarget else_;
3373 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003374 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003375
3376 if (dest.false_was_fall_through()) {
3377 // The else target was bound, so we compile the else part first.
3378 Visit(node->else_statement());
3379
3380 // We may have dangling jumps to the then part.
3381 if (then.is_linked()) {
3382 if (has_valid_frame()) exit.Jump();
3383 then.Bind();
3384 Visit(node->then_statement());
3385 }
3386 } else {
3387 // The then target was bound, so we compile the then part first.
3388 Visit(node->then_statement());
3389
3390 if (else_.is_linked()) {
3391 if (has_valid_frame()) exit.Jump();
3392 else_.Bind();
3393 Visit(node->else_statement());
3394 }
3395 }
3396
3397 } else if (has_then_stm) {
3398 ASSERT(!has_else_stm);
3399 JumpTarget then;
3400 ControlDestination dest(&then, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003401 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003402
3403 if (dest.false_was_fall_through()) {
3404 // The exit label was bound. We may have dangling jumps to the
3405 // then part.
3406 if (then.is_linked()) {
3407 exit.Unuse();
3408 exit.Jump();
3409 then.Bind();
3410 Visit(node->then_statement());
3411 }
3412 } else {
3413 // The then label was bound.
3414 Visit(node->then_statement());
3415 }
3416
3417 } else if (has_else_stm) {
3418 ASSERT(!has_then_stm);
3419 JumpTarget else_;
3420 ControlDestination dest(&exit, &else_, false);
Steve Blockd0582a62009-12-15 09:54:21 +00003421 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003422
3423 if (dest.true_was_fall_through()) {
3424 // The exit label was bound. We may have dangling jumps to the
3425 // else part.
3426 if (else_.is_linked()) {
3427 exit.Unuse();
3428 exit.Jump();
3429 else_.Bind();
3430 Visit(node->else_statement());
3431 }
3432 } else {
3433 // The else label was bound.
3434 Visit(node->else_statement());
3435 }
3436
3437 } else {
3438 ASSERT(!has_then_stm && !has_else_stm);
3439 // We only care about the condition's side effects (not its value
3440 // or control flow effect). LoadCondition is called without
3441 // forcing control flow.
3442 ControlDestination dest(&exit, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003443 LoadCondition(node->condition(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00003444 if (!dest.is_used()) {
3445 // We got a value on the frame rather than (or in addition to)
3446 // control flow.
3447 frame_->Drop();
3448 }
3449 }
3450
3451 if (exit.is_linked()) {
3452 exit.Bind();
3453 }
3454}
3455
3456
3457void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
3458 ASSERT(!in_spilled_code());
3459 Comment cmnt(masm_, "[ ContinueStatement");
3460 CodeForStatementPosition(node);
3461 node->target()->continue_target()->Jump();
3462}
3463
3464
3465void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
3466 ASSERT(!in_spilled_code());
3467 Comment cmnt(masm_, "[ BreakStatement");
3468 CodeForStatementPosition(node);
3469 node->target()->break_target()->Jump();
3470}
3471
3472
3473void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
3474 ASSERT(!in_spilled_code());
3475 Comment cmnt(masm_, "[ ReturnStatement");
3476
3477 CodeForStatementPosition(node);
3478 Load(node->expression());
3479 Result return_value = frame_->Pop();
Steve Blockd0582a62009-12-15 09:54:21 +00003480 masm()->WriteRecordedPositions();
Steve Blocka7e24c12009-10-30 11:49:00 +00003481 if (function_return_is_shadowed_) {
3482 function_return_.Jump(&return_value);
3483 } else {
3484 frame_->PrepareForReturn();
3485 if (function_return_.is_bound()) {
3486 // If the function return label is already bound we reuse the
3487 // code by jumping to the return site.
3488 function_return_.Jump(&return_value);
3489 } else {
3490 function_return_.Bind(&return_value);
3491 GenerateReturnSequence(&return_value);
3492 }
3493 }
3494}
3495
3496
3497void CodeGenerator::GenerateReturnSequence(Result* return_value) {
3498 // The return value is a live (but not currently reference counted)
3499 // reference to eax. This is safe because the current frame does not
3500 // contain a reference to eax (it is prepared for the return by spilling
3501 // all registers).
3502 if (FLAG_trace) {
3503 frame_->Push(return_value);
3504 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
3505 }
3506 return_value->ToRegister(eax);
3507
3508 // Add a label for checking the size of the code used for returning.
3509 Label check_exit_codesize;
3510 masm_->bind(&check_exit_codesize);
3511
3512 // Leave the frame and return popping the arguments and the
3513 // receiver.
3514 frame_->Exit();
Andrei Popescu31002712010-02-23 13:46:05 +00003515 masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00003516 DeleteFrame();
3517
3518#ifdef ENABLE_DEBUGGER_SUPPORT
3519 // Check that the size of the code used for returning matches what is
3520 // expected by the debugger.
Steve Blockd0582a62009-12-15 09:54:21 +00003521 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
Steve Blocka7e24c12009-10-30 11:49:00 +00003522 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
3523#endif
3524}
3525
3526
3527void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
3528 ASSERT(!in_spilled_code());
3529 Comment cmnt(masm_, "[ WithEnterStatement");
3530 CodeForStatementPosition(node);
3531 Load(node->expression());
3532 Result context;
3533 if (node->is_catch_block()) {
3534 context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
3535 } else {
3536 context = frame_->CallRuntime(Runtime::kPushContext, 1);
3537 }
3538
3539 // Update context local.
3540 frame_->SaveContextRegister();
3541
3542 // Verify that the runtime call result and esi agree.
3543 if (FLAG_debug_code) {
3544 __ cmp(context.reg(), Operand(esi));
3545 __ Assert(equal, "Runtime::NewContext should end up in esi");
3546 }
3547}
3548
3549
3550void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
3551 ASSERT(!in_spilled_code());
3552 Comment cmnt(masm_, "[ WithExitStatement");
3553 CodeForStatementPosition(node);
3554 // Pop context.
3555 __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
3556 // Update context local.
3557 frame_->SaveContextRegister();
3558}
3559
3560
3561void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
3562 ASSERT(!in_spilled_code());
3563 Comment cmnt(masm_, "[ SwitchStatement");
3564 CodeForStatementPosition(node);
3565 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3566
3567 // Compile the switch value.
3568 Load(node->tag());
3569
3570 ZoneList<CaseClause*>* cases = node->cases();
3571 int length = cases->length();
3572 CaseClause* default_clause = NULL;
3573
3574 JumpTarget next_test;
3575 // Compile the case label expressions and comparisons. Exit early
3576 // if a comparison is unconditionally true. The target next_test is
3577 // bound before the loop in order to indicate control flow to the
3578 // first comparison.
3579 next_test.Bind();
3580 for (int i = 0; i < length && !next_test.is_unused(); i++) {
3581 CaseClause* clause = cases->at(i);
3582 // The default is not a test, but remember it for later.
3583 if (clause->is_default()) {
3584 default_clause = clause;
3585 continue;
3586 }
3587
3588 Comment cmnt(masm_, "[ Case comparison");
3589 // We recycle the same target next_test for each test. Bind it if
3590 // the previous test has not done so and then unuse it for the
3591 // loop.
3592 if (next_test.is_linked()) {
3593 next_test.Bind();
3594 }
3595 next_test.Unuse();
3596
3597 // Duplicate the switch value.
3598 frame_->Dup();
3599
3600 // Compile the label expression.
3601 Load(clause->label());
3602
3603 // Compare and branch to the body if true or the next test if
3604 // false. Prefer the next test as a fall through.
3605 ControlDestination dest(clause->body_target(), &next_test, false);
Leon Clarkee46be812010-01-19 14:06:41 +00003606 Comparison(node, equal, true, &dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00003607
3608 // If the comparison fell through to the true target, jump to the
3609 // actual body.
3610 if (dest.true_was_fall_through()) {
3611 clause->body_target()->Unuse();
3612 clause->body_target()->Jump();
3613 }
3614 }
3615
3616 // If there was control flow to a next test from the last one
3617 // compiled, compile a jump to the default or break target.
3618 if (!next_test.is_unused()) {
3619 if (next_test.is_linked()) {
3620 next_test.Bind();
3621 }
3622 // Drop the switch value.
3623 frame_->Drop();
3624 if (default_clause != NULL) {
3625 default_clause->body_target()->Jump();
3626 } else {
3627 node->break_target()->Jump();
3628 }
3629 }
3630
3631
3632 // The last instruction emitted was a jump, either to the default
3633 // clause or the break target, or else to a case body from the loop
3634 // that compiles the tests.
3635 ASSERT(!has_valid_frame());
3636 // Compile case bodies as needed.
3637 for (int i = 0; i < length; i++) {
3638 CaseClause* clause = cases->at(i);
3639
3640 // There are two ways to reach the body: from the corresponding
3641 // test or as the fall through of the previous body.
3642 if (clause->body_target()->is_linked() || has_valid_frame()) {
3643 if (clause->body_target()->is_linked()) {
3644 if (has_valid_frame()) {
3645 // If we have both a jump to the test and a fall through, put
3646 // a jump on the fall through path to avoid the dropping of
3647 // the switch value on the test path. The exception is the
3648 // default which has already had the switch value dropped.
3649 if (clause->is_default()) {
3650 clause->body_target()->Bind();
3651 } else {
3652 JumpTarget body;
3653 body.Jump();
3654 clause->body_target()->Bind();
3655 frame_->Drop();
3656 body.Bind();
3657 }
3658 } else {
3659 // No fall through to worry about.
3660 clause->body_target()->Bind();
3661 if (!clause->is_default()) {
3662 frame_->Drop();
3663 }
3664 }
3665 } else {
3666 // Otherwise, we have only fall through.
3667 ASSERT(has_valid_frame());
3668 }
3669
3670 // We are now prepared to compile the body.
3671 Comment cmnt(masm_, "[ Case body");
3672 VisitStatements(clause->statements());
3673 }
3674 clause->body_target()->Unuse();
3675 }
3676
3677 // We may not have a valid frame here so bind the break target only
3678 // if needed.
3679 if (node->break_target()->is_linked()) {
3680 node->break_target()->Bind();
3681 }
3682 node->break_target()->Unuse();
3683}
3684
3685
Steve Block3ce2e202009-11-05 08:53:23 +00003686void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003687 ASSERT(!in_spilled_code());
Steve Block3ce2e202009-11-05 08:53:23 +00003688 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00003689 CodeForStatementPosition(node);
3690 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
Steve Block3ce2e202009-11-05 08:53:23 +00003691 JumpTarget body(JumpTarget::BIDIRECTIONAL);
3692 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00003693
Steve Block3ce2e202009-11-05 08:53:23 +00003694 ConditionAnalysis info = AnalyzeCondition(node->cond());
3695 // Label the top of the loop for the backward jump if necessary.
3696 switch (info) {
3697 case ALWAYS_TRUE:
3698 // Use the continue target.
3699 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3700 node->continue_target()->Bind();
3701 break;
3702 case ALWAYS_FALSE:
3703 // No need to label it.
3704 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3705 break;
3706 case DONT_KNOW:
3707 // Continue is the test, so use the backward body target.
3708 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3709 body.Bind();
3710 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00003711 }
3712
Steve Block3ce2e202009-11-05 08:53:23 +00003713 CheckStack(); // TODO(1222600): ignore if body contains calls.
3714 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00003715
Steve Block3ce2e202009-11-05 08:53:23 +00003716 // Compile the test.
3717 switch (info) {
3718 case ALWAYS_TRUE:
3719 // If control flow can fall off the end of the body, jump back to
3720 // the top and bind the break target at the exit.
3721 if (has_valid_frame()) {
3722 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00003723 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003724 if (node->break_target()->is_linked()) {
3725 node->break_target()->Bind();
3726 }
3727 break;
Steve Block3ce2e202009-11-05 08:53:23 +00003728 case ALWAYS_FALSE:
3729 // We may have had continues or breaks in the body.
3730 if (node->continue_target()->is_linked()) {
3731 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003732 }
Steve Block3ce2e202009-11-05 08:53:23 +00003733 if (node->break_target()->is_linked()) {
3734 node->break_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003735 }
Steve Block3ce2e202009-11-05 08:53:23 +00003736 break;
3737 case DONT_KNOW:
3738 // We have to compile the test expression if it can be reached by
3739 // control flow falling out of the body or via continue.
3740 if (node->continue_target()->is_linked()) {
3741 node->continue_target()->Bind();
3742 }
3743 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00003744 Comment cmnt(masm_, "[ DoWhileCondition");
3745 CodeForDoWhileConditionPosition(node);
Steve Block3ce2e202009-11-05 08:53:23 +00003746 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00003747 LoadCondition(node->cond(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003748 }
Steve Block3ce2e202009-11-05 08:53:23 +00003749 if (node->break_target()->is_linked()) {
3750 node->break_target()->Bind();
3751 }
3752 break;
3753 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003754
Steve Block3ce2e202009-11-05 08:53:23 +00003755 DecrementLoopNesting();
3756}
Steve Blocka7e24c12009-10-30 11:49:00 +00003757
Steve Block3ce2e202009-11-05 08:53:23 +00003758
3759void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
3760 ASSERT(!in_spilled_code());
3761 Comment cmnt(masm_, "[ WhileStatement");
3762 CodeForStatementPosition(node);
3763
3764 // If the condition is always false and has no side effects, we do not
3765 // need to compile anything.
3766 ConditionAnalysis info = AnalyzeCondition(node->cond());
3767 if (info == ALWAYS_FALSE) return;
3768
3769 // Do not duplicate conditions that may have function literal
3770 // subexpressions. This can cause us to compile the function literal
3771 // twice.
3772 bool test_at_bottom = !node->may_have_function_literal();
3773 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3774 IncrementLoopNesting();
3775 JumpTarget body;
3776 if (test_at_bottom) {
3777 body.set_direction(JumpTarget::BIDIRECTIONAL);
3778 }
3779
3780 // Based on the condition analysis, compile the test as necessary.
3781 switch (info) {
3782 case ALWAYS_TRUE:
3783 // We will not compile the test expression. Label the top of the
3784 // loop with the continue target.
3785 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3786 node->continue_target()->Bind();
3787 break;
3788 case DONT_KNOW: {
3789 if (test_at_bottom) {
3790 // Continue is the test at the bottom, no need to label the test
3791 // at the top. The body is a backward target.
3792 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3793 } else {
3794 // Label the test at the top as the continue target. The body
3795 // is a forward-only target.
3796 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3797 node->continue_target()->Bind();
3798 }
3799 // Compile the test with the body as the true target and preferred
3800 // fall-through and with the break target as the false target.
3801 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00003802 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00003803
3804 if (dest.false_was_fall_through()) {
3805 // If we got the break target as fall-through, the test may have
3806 // been unconditionally false (if there are no jumps to the
3807 // body).
3808 if (!body.is_linked()) {
3809 DecrementLoopNesting();
3810 return;
3811 }
3812
3813 // Otherwise, jump around the body on the fall through and then
3814 // bind the body target.
3815 node->break_target()->Unuse();
3816 node->break_target()->Jump();
3817 body.Bind();
3818 }
3819 break;
3820 }
3821 case ALWAYS_FALSE:
3822 UNREACHABLE();
3823 break;
3824 }
3825
3826 CheckStack(); // TODO(1222600): ignore if body contains calls.
3827 Visit(node->body());
3828
3829 // Based on the condition analysis, compile the backward jump as
3830 // necessary.
3831 switch (info) {
3832 case ALWAYS_TRUE:
3833 // The loop body has been labeled with the continue target.
3834 if (has_valid_frame()) {
3835 node->continue_target()->Jump();
3836 }
3837 break;
3838 case DONT_KNOW:
3839 if (test_at_bottom) {
3840 // If we have chosen to recompile the test at the bottom, then
3841 // it is the continue target.
Steve Blocka7e24c12009-10-30 11:49:00 +00003842 if (node->continue_target()->is_linked()) {
3843 node->continue_target()->Bind();
3844 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003845 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00003846 // The break target is the fall-through (body is a backward
3847 // jump from here and thus an invalid fall-through).
3848 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00003849 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00003850 }
3851 } else {
3852 // If we have chosen not to recompile the test at the bottom,
3853 // jump back to the one at the top.
3854 if (has_valid_frame()) {
3855 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00003856 }
3857 }
Steve Block3ce2e202009-11-05 08:53:23 +00003858 break;
3859 case ALWAYS_FALSE:
3860 UNREACHABLE();
3861 break;
3862 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003863
Steve Block3ce2e202009-11-05 08:53:23 +00003864 // The break target may be already bound (by the condition), or there
3865 // may not be a valid frame. Bind it only if needed.
3866 if (node->break_target()->is_linked()) {
3867 node->break_target()->Bind();
3868 }
3869 DecrementLoopNesting();
3870}
3871
3872
Steve Block6ded16b2010-05-10 14:33:55 +01003873void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
3874 ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
3875 if (slot->type() == Slot::LOCAL) {
3876 frame_->SetTypeForLocalAt(slot->index(), info);
3877 } else {
3878 frame_->SetTypeForParamAt(slot->index(), info);
3879 }
3880 if (FLAG_debug_code && info.IsSmi()) {
3881 if (slot->type() == Slot::LOCAL) {
3882 frame_->PushLocalAt(slot->index());
3883 } else {
3884 frame_->PushParameterAt(slot->index());
3885 }
3886 Result var = frame_->Pop();
3887 var.ToRegister();
3888 __ AbortIfNotSmi(var.reg());
3889 }
3890}
3891
3892
Steve Block3ce2e202009-11-05 08:53:23 +00003893void CodeGenerator::VisitForStatement(ForStatement* node) {
3894 ASSERT(!in_spilled_code());
3895 Comment cmnt(masm_, "[ ForStatement");
3896 CodeForStatementPosition(node);
3897
3898 // Compile the init expression if present.
3899 if (node->init() != NULL) {
3900 Visit(node->init());
3901 }
3902
3903 // If the condition is always false and has no side effects, we do not
3904 // need to compile anything else.
3905 ConditionAnalysis info = AnalyzeCondition(node->cond());
3906 if (info == ALWAYS_FALSE) return;
3907
3908 // Do not duplicate conditions that may have function literal
3909 // subexpressions. This can cause us to compile the function literal
3910 // twice.
3911 bool test_at_bottom = !node->may_have_function_literal();
3912 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3913 IncrementLoopNesting();
3914
3915 // Target for backward edge if no test at the bottom, otherwise
3916 // unused.
3917 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
3918
3919 // Target for backward edge if there is a test at the bottom,
3920 // otherwise used as target for test at the top.
3921 JumpTarget body;
3922 if (test_at_bottom) {
3923 body.set_direction(JumpTarget::BIDIRECTIONAL);
3924 }
3925
3926 // Based on the condition analysis, compile the test as necessary.
3927 switch (info) {
3928 case ALWAYS_TRUE:
3929 // We will not compile the test expression. Label the top of the
3930 // loop.
3931 if (node->next() == NULL) {
3932 // Use the continue target if there is no update expression.
3933 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3934 node->continue_target()->Bind();
3935 } else {
3936 // Otherwise use the backward loop target.
3937 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3938 loop.Bind();
3939 }
3940 break;
3941 case DONT_KNOW: {
3942 if (test_at_bottom) {
3943 // Continue is either the update expression or the test at the
3944 // bottom, no need to label the test at the top.
3945 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3946 } else if (node->next() == NULL) {
3947 // We are not recompiling the test at the bottom and there is no
3948 // update expression.
3949 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3950 node->continue_target()->Bind();
3951 } else {
3952 // We are not recompiling the test at the bottom and there is an
3953 // update expression.
3954 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3955 loop.Bind();
3956 }
3957 // Compile the test with the body as the true target and preferred
3958 // fall-through and with the break target as the false target.
3959 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00003960 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00003961
3962 if (dest.false_was_fall_through()) {
3963 // If we got the break target as fall-through, the test may have
3964 // been unconditionally false (if there are no jumps to the
3965 // body).
3966 if (!body.is_linked()) {
3967 DecrementLoopNesting();
3968 return;
3969 }
3970
3971 // Otherwise, jump around the body on the fall through and then
3972 // bind the body target.
3973 node->break_target()->Unuse();
3974 node->break_target()->Jump();
3975 body.Bind();
3976 }
3977 break;
3978 }
3979 case ALWAYS_FALSE:
3980 UNREACHABLE();
3981 break;
3982 }
3983
3984 CheckStack(); // TODO(1222600): ignore if body contains calls.
Steve Block6ded16b2010-05-10 14:33:55 +01003985
3986 // We know that the loop index is a smi if it is not modified in the
3987 // loop body and it is checked against a constant limit in the loop
3988 // condition. In this case, we reset the static type information of the
3989 // loop index to smi before compiling the body, the update expression, and
3990 // the bottom check of the loop condition.
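  // A typical fast smi loop: "for (var i = 0; i < 100; i++) { ... }",
  // where i is not written in the body and the limit is a constant.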
3991 if (node->is_fast_smi_loop()) {
3992 // Set number type of the loop variable to smi.
3993 SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
3994 }
3995
Steve Block3ce2e202009-11-05 08:53:23 +00003996 Visit(node->body());
3997
3998 // If there is an update expression, compile it if necessary.
3999 if (node->next() != NULL) {
4000 if (node->continue_target()->is_linked()) {
4001 node->continue_target()->Bind();
4002 }
4003
4004 // Control can reach the update by falling out of the body or by a
4005 // continue.
4006 if (has_valid_frame()) {
4007 // Record the source position of the statement as this code which
4008 // is after the code for the body actually belongs to the loop
4009 // statement and not the body.
4010 CodeForStatementPosition(node);
4011 Visit(node->next());
4012 }
4013 }
4014
Steve Block6ded16b2010-05-10 14:33:55 +01004015 // Set the type of the loop variable to smi before compiling the test
4016 // expression if we are in a fast smi loop condition.
4017 if (node->is_fast_smi_loop() && has_valid_frame()) {
4018 // Set number type of the loop variable to smi.
4019 SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
4020 }
4021
Steve Block3ce2e202009-11-05 08:53:23 +00004022 // Based on the condition analysis, compile the backward jump as
4023 // necessary.
4024 switch (info) {
4025 case ALWAYS_TRUE:
4026 if (has_valid_frame()) {
4027 if (node->next() == NULL) {
4028 node->continue_target()->Jump();
4029 } else {
4030 loop.Jump();
4031 }
4032 }
4033 break;
4034 case DONT_KNOW:
4035 if (test_at_bottom) {
4036 if (node->continue_target()->is_linked()) {
4037 // We can have dangling jumps to the continue target if there
4038 // was no update expression.
4039 node->continue_target()->Bind();
4040 }
4041 // Control can reach the test at the bottom by falling out of
4042 // the body, by a continue in the body, or from the update
4043 // expression.
4044 if (has_valid_frame()) {
4045 // The break target is the fall-through (body is a backward
4046 // jump from here).
4047 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00004048 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00004049 }
4050 } else {
4051 // Otherwise, jump back to the test at the top.
Steve Blocka7e24c12009-10-30 11:49:00 +00004052 if (has_valid_frame()) {
4053 if (node->next() == NULL) {
4054 node->continue_target()->Jump();
4055 } else {
4056 loop.Jump();
4057 }
4058 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004059 }
4060 break;
Steve Block3ce2e202009-11-05 08:53:23 +00004061 case ALWAYS_FALSE:
4062 UNREACHABLE();
4063 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00004064 }
4065
Steve Block3ce2e202009-11-05 08:53:23 +00004066 // The break target may be already bound (by the condition), or
4067 // there may not be a valid frame. Bind it only if needed.
4068 if (node->break_target()->is_linked()) {
4069 node->break_target()->Bind();
4070 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004071 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00004072}
4073
4074
4075void CodeGenerator::VisitForInStatement(ForInStatement* node) {
4076 ASSERT(!in_spilled_code());
4077 VirtualFrame::SpilledScope spilled_scope;
4078 Comment cmnt(masm_, "[ ForInStatement");
4079 CodeForStatementPosition(node);
4080
4081 JumpTarget primitive;
4082 JumpTarget jsobject;
4083 JumpTarget fixed_array;
4084 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
4085 JumpTarget end_del_check;
4086 JumpTarget exit;
4087
4088 // Get the object to enumerate over (converted to JSObject).
4089 LoadAndSpill(node->enumerable());
4090
4091  // Both SpiderMonkey and kjs ignore null and undefined, in contrast
4092  // to the specification; section 12.6.4 mandates a call to ToObject.
4093 frame_->EmitPop(eax);
4094
4095 // eax: value to be iterated over
4096 __ cmp(eax, Factory::undefined_value());
4097 exit.Branch(equal);
4098 __ cmp(eax, Factory::null_value());
4099 exit.Branch(equal);
4100
4101 // Stack layout in body:
4102 // [iteration counter (smi)] <- slot 0
4103 // [length of array] <- slot 1
4104 // [FixedArray] <- slot 2
4105 // [Map or 0] <- slot 3
4106 // [Object] <- slot 4
4107
4108 // Check if enumerable is already a JSObject
4109 // eax: value to be iterated over
4110 __ test(eax, Immediate(kSmiTagMask));
4111 primitive.Branch(zero);
4112 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
4113 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
4114 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
4115 jsobject.Branch(above_equal);
4116
4117 primitive.Bind();
4118 frame_->EmitPush(eax);
4119 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
4120  // The function call returns the value in eax, which is where we want it below.
4121
4122 jsobject.Bind();
4123 // Get the set of properties (as a FixedArray or Map).
4124 // eax: value to be iterated over
Steve Blockd0582a62009-12-15 09:54:21 +00004125 frame_->EmitPush(eax); // Push the object being iterated over.
Steve Blocka7e24c12009-10-30 11:49:00 +00004126
Steve Blockd0582a62009-12-15 09:54:21 +00004127 // Check cache validity in generated code. This is a fast case for
4128 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
4129 // guarantee cache validity, call the runtime system to check cache
4130 // validity or get the property names in a fixed array.
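  // The walk below mirrors JSObject::IsSimpleEnum: every object on the
  // prototype chain must have no elements, non-empty instance descriptors
  // with an enum cache, and (for all but the receiver) an empty cache.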
4131 JumpTarget call_runtime;
4132 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
4133 JumpTarget check_prototype;
4134 JumpTarget use_cache;
4135 __ mov(ecx, eax);
4136 loop.Bind();
4137 // Check that there are no elements.
4138 __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
4139 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
4140 call_runtime.Branch(not_equal);
4141 // Check that instance descriptors are not empty so that we can
4142 // check for an enum cache. Leave the map in ebx for the subsequent
4143 // prototype load.
4144 __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
4145 __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
4146 __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
4147 call_runtime.Branch(equal);
4148  // Check that there is an enum cache in the non-empty instance
4149 // descriptors. This is the case if the next enumeration index
4150 // field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  call_runtime.Branch(zero);
  // For all objects but the receiver, check that the cache is empty.
  __ cmp(ecx, Operand(eax));
  check_prototype.Branch(equal);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  loop.Branch(not_equal);
  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  use_cache.Jump();

  call_runtime.Bind();
  // Call the runtime to get the property names for the object.
  frame_->EmitPush(eax);  // push the Object (slot 4) for the runtime call
  frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  // eax: map or fixed array (result from call to
  // Runtime::kGetPropertyNamesFast)
  __ mov(edx, Operand(eax));
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, Factory::meta_map());
  fixed_array.Branch(not_equal);

  use_cache.Bind();
  // Get enum cache
  // eax: map (either the result from a call to
  // Runtime::kGetPropertyNamesFast or has been fetched directly from
  // the object)
  __ mov(ecx, Operand(eax));

  __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
  // Get the bridge array held in the enumeration index field.
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  // Get the cache from the bridge array.
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  frame_->EmitPush(eax);  // <- slot 3
  frame_->EmitPush(edx);  // <- slot 2
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
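  // Explanatory note (added): the length just loaded is an untagged
  // integer; SmiTag shifts it left by kSmiTagSize (one bit, with tag value
  // zero on ia32), producing a smi that can live on the expression stack.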
  __ SmiTag(eax);
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0
  entry.Jump();

  fixed_array.Bind();
  // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 3
  frame_->EmitPush(eax);  // <- slot 2

  // Push the length of the array and the initial index onto the stack.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ SmiTag(eax);
  frame_->EmitPush(eax);  // <- slot 1
  frame_->EmitPush(Immediate(Smi::FromInt(0)));  // <- slot 0

  // Condition.
  entry.Bind();
  // Grab the current frame's height for the break and continue
  // targets only after all the state is pushed on the frame.
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);

  __ mov(eax, frame_->ElementAt(0));  // load the current count
  __ cmp(eax, frame_->ElementAt(1));  // compare to the array length
  node->break_target()->Branch(above_equal);

  // Get the i'th entry of the array.
  __ mov(edx, frame_->ElementAt(2));
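  // Explanatory note (added): eax holds the smi-tagged index, i.e. the
  // index shifted left by one. Assuming FixedArrayElementOperand uses the
  // usual [base + index * 2 + FixedArray::kHeaderSize] encoding, this
  // scaling yields index * kPointerSize, the byte offset of the element.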
  __ mov(ebx, FixedArrayElementOperand(edx, eax));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  __ mov(edx, frame_->ElementAt(3));
  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  // eax: current iteration count
  // ebx: i'th entry of the enum cache
  // edx: expected map value
  __ mov(ecx, frame_->ElementAt(4));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, Operand(edx));
  end_del_check.Branch(equal);

  // Convert the entry to a string (or null if it isn't a property anymore).
  frame_->EmitPush(frame_->ElementAt(4));  // push enumerable
  frame_->EmitPush(ebx);  // push entry
  frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
  __ mov(ebx, Operand(eax));

  // If the property has been removed while iterating, we just skip it.
  __ cmp(ebx, Factory::null_value());
  node->continue_target()->Branch(equal);

  end_del_check.Bind();
  // Store the entry in the 'each' expression and take another spin in the
  // loop. ebx: i'th entry of the enum cache (or string thereof).
  frame_->EmitPush(ebx);
  { Reference each(this, node->each());
    // Loading a reference may leave the frame in an unspilled state.
    frame_->SpillAll();
    if (!each.is_illegal()) {
      if (each.size() > 0) {
        frame_->EmitPush(frame_->ElementAt(each.size()));
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop(2);
      } else {
        // If the reference was to a slot we rely on the convenient property
        // that it doesn't matter whether a value (e.g., ebx pushed above) is
        // right on top of or right underneath a zero-sized reference.
        each.SetValue(NOT_CONST_INIT);
        frame_->Drop();
      }
    }
  }
  // Unloading a reference may leave the frame in an unspilled state.
  frame_->SpillAll();

  // Body.
  CheckStack();  // TODO(1222600): ignore if body contains calls.
  VisitAndSpill(node->body());

  // Next. Reestablish a spilled frame in case we are coming here via
  // a continue in the body.
  node->continue_target()->Bind();
  frame_->SpillAll();
  frame_->EmitPop(eax);
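  // Explanatory note (added): the counter is a smi, so adding the
  // smi-encoded constant 1 increments the underlying integer by one
  // without any untagging or retagging.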
  __ add(Operand(eax), Immediate(Smi::FromInt(1)));
  frame_->EmitPush(eax);
  entry.Jump();

  // Cleanup. No need to spill because VirtualFrame::Drop is safe for
  // any frame.
  node->break_target()->Bind();
  frame_->Drop(5);

  // Exit.
  exit.Bind();

  node->continue_target()->Unuse();
  node->break_target()->Unuse();
}


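// Illustrative example (not part of the original source): this visitor
// compiles statements of the form
//
//   try { ... } catch (e) { ... }
//
// The try block runs under a TRY_CATCH handler; on a throw, control
// resumes at the catch block with the exception value in eax, where it is
// stored into the catch variable.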
void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryCatchStatement");
  CodeForStatementPosition(node);

  JumpTarget try_block;
  JumpTarget exit;

  try_block.Call();
  // --- Catch block ---
  frame_->EmitPush(eax);

  // Store the caught exception in the catch variable.
  Variable* catch_var = node->catch_var()->var();
  ASSERT(catch_var != NULL && catch_var->slot() != NULL);
  StoreToSlot(catch_var->slot(), NOT_CONST_INIT);

  // Remove the exception from the stack.
  frame_->Drop();

  VisitStatementsAndSpill(node->catch_block()->statements());
  if (has_valid_frame()) {
    exit.Jump();
  }


  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_CATCH_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // Make sure that there's nothing left on the stack above the
  // handler structure.
  if (FLAG_debug_code) {
    __ mov(eax, Operand::StaticVariable(handler_address));
    __ cmp(esp, Operand(eax));
    __ Assert(equal, "stack pointer should point to top handler");
  }

  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame. Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing targets that
  // have been jumped to. Deallocate each shadow target.
  Result return_value;
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain; be careful not to destroy the TOS if
      // there is one.
      if (i == kReturnShadowIndex) {
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (e.g., for...in) may have left stuff on the stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        if (!function_return_is_shadowed_) frame_->PrepareForReturn();
        shadows[i]->other_target()->Jump(&return_value);
      } else {
        shadows[i]->other_target()->Jump();
      }
    }
  }

  exit.Bind();
}


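// Illustrative example (not part of the original source): this visitor
// compiles statements of the form
//
//   try { ... } finally { ... }
//
// The finally block must run however the try block exits: by falling off
// the end, by throwing, or by a break, continue, or return that escapes
// the block. The state enum below records which of these happened.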
void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
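  // Explanatory note (added): JUMPING also serves as a base value; the
  // state JUMPING + i, set further down, records that control left the try
  // block through the i'th shadow target.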

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(eax);
  // In case of thrown exceptions, this is where we continue.
  __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in ecx, then jump around the unlink blocks if any.
    frame_->EmitPush(Immediate(Factory::undefined_value()));
    __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // on the virtual frame. We must preserve it until it is
      // pushed.
      if (i == kReturnShadowIndex) {
        Result return_value;
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (e.g., for...in) may have left stuff on the
      // stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.
      ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this target shadowed the function return, materialize
        // the return value on the stack.
        frame_->EmitPush(eax);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        frame_->EmitPush(Immediate(Factory::undefined_value()));
      }
      __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(ecx);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatementsAndSpill(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(ecx);
    frame_->EmitPop(eax);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets. Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      BreakTarget* original = shadows[i]->other_target();
      __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
      if (i == kReturnShadowIndex) {
        // The return value is (already) in eax.
        Result return_value = allocator_->Allocate(eax);
        ASSERT(return_value.is_valid());
        if (function_return_is_shadowed_) {
          original->Branch(equal, &return_value);
        } else {
          // Branch around the preparation for return which may emit
          // code.
          JumpTarget skip;
          skip.Branch(not_equal);
          frame_->PrepareForReturn();
          original->Jump(&return_value);
          skip.Bind();
        }
      } else {
        original->Branch(equal);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
    exit.Branch(not_equal);

    // Rethrow exception.
    frame_->EmitPush(eax);  // undo pop from above
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Spill everything, even constants, to the frame.
  frame_->SpillAll();

  frame_->DebugBreak();
  // Ignore the return value.
#endif
}


Result CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info) {
  // The inevitable call will sync frame elements to memory anyway, so
  // we do it eagerly to allow us to push the arguments directly into
  // place.
  frame()->SyncRange(0, frame()->element_count() - 1);

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (scope()->is_function_scope() && function_info->num_literals() == 0) {
    FastNewClosureStub stub;
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallStub(&stub, 1);
  } else {
    // Call the runtime to instantiate the function based on the
    // shared function info.
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallRuntime(Runtime::kNewClosure, 2);
  }
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  Comment cmnt(masm_, "[ FunctionLiteral");
  ASSERT(!in_safe_int32_mode());
  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script(), this);
  // Check for stack-overflow exception.
  if (HasStackOverflow()) return;
  Result result = InstantiateFunction(function_info);
  frame()->Push(&result);
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  Result result = InstantiateFunction(node->shared_function_info());
  frame()->Push(&result);
}


void CodeGenerator::VisitConditional(Conditional* node) {
  Comment cmnt(masm_, "[ Conditional");
  ASSERT(!in_safe_int32_mode());
  JumpTarget then;
  JumpTarget else_;
  JumpTarget exit;
  ControlDestination dest(&then, &else_, true);
  LoadCondition(node->condition(), &dest, true);

  if (dest.false_was_fall_through()) {
    // The else target was bound, so we compile the else part first.
    Load(node->else_expression());

    if (then.is_linked()) {
      exit.Jump();
      then.Bind();
      Load(node->then_expression());
    }
  } else {
    // The then target was bound, so we compile the then part first.
    Load(node->then_expression());

    if (else_.is_linked()) {
      exit.Jump();
      else_.Bind();
      Load(node->else_expression());
    }
  }

  exit.Bind();
}


Result CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  Result result;
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());
    JumpTarget slow;
    JumpTarget done;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &result,
                                    &slow,
                                    &done);

    slow.Bind();
    // A runtime call is inevitable. We eagerly sync frame elements
    // to memory so that we can push the arguments directly into place
    // on top of the frame.
    frame()->SyncRange(0, frame()->element_count() - 1);
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(slot->var()->name()));
    if (typeof_state == INSIDE_TYPEOF) {
      result =
          frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      result = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind(&result);
    return result;

  } else if (slot->var()->mode() == Variable::CONST) {
    // Const slots may contain 'the hole' value (the constant hasn't been
    // initialized yet) which needs to be converted into the 'undefined'
    // value.
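    // Illustrative example (not part of the original source): reading a
    // const before its initializer has executed, as in
    //
    //   alert(c); const c = 1;
    //
    // finds the hole in the slot and must yield 'undefined'.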
    //
    // We currently spill the virtual frame because constants use the
    // potentially unsafe direct-frame access of SlotOperand.
    VirtualFrame::SpilledScope spilled_scope;
    Comment cmnt(masm_, "[ Load const");
    Label exit;
    __ mov(ecx, SlotOperand(slot, ecx));
    __ cmp(ecx, Factory::the_hole_value());
    __ j(not_equal, &exit);
    __ mov(ecx, Factory::undefined_value());
    __ bind(&exit);
    return Result(ecx);

  } else if (slot->type() == Slot::PARAMETER) {
    frame()->PushParameterAt(slot->index());
    return frame()->Pop();

  } else if (slot->type() == Slot::LOCAL) {
    frame()->PushLocalAt(slot->index());
    return frame()->Pop();

  } else {
    // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
    // here.
    //
    // The use of SlotOperand below is safe for an unspilled frame
    // because it will always be a context slot.
    ASSERT(slot->type() == Slot::CONTEXT);
    result = allocator()->Allocate();
    ASSERT(result.is_valid());
    __ mov(result.reg(), SlotOperand(slot, result.reg()));
    return result;
  }
}


Result CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                    TypeofState state) {
  Result result = LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return result;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return result;
  // If the loaded value is a constant, we know whether the arguments
  // object has been lazily loaded.
Andrei Popescu402d9372010-02-26 13:31:12 +00004804 if (result.is_constant()) {
4805 if (result.handle()->IsTheHole()) {
4806 result.Unuse();
4807 return StoreArgumentsObject(false);
Steve Blocka7e24c12009-10-30 11:49:00 +00004808 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00004809 return result;
Steve Blocka7e24c12009-10-30 11:49:00 +00004810 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004811 }
4812
4813 // The loaded value is in a register. If it is the sentinel that
4814 // indicates that we haven't loaded the arguments object yet, we
4815 // need to do it now.
4816 JumpTarget exit;
Andrei Popescu402d9372010-02-26 13:31:12 +00004817 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
4818 exit.Branch(not_equal, &result);
4819
4820 result.Unuse();
4821 result = StoreArgumentsObject(false);
4822 exit.Bind(&result);
4823 return result;
Steve Blocka7e24c12009-10-30 11:49:00 +00004824}
4825
4826
4827Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
4828 Slot* slot,
4829 TypeofState typeof_state,
4830 JumpTarget* slow) {
Steve Block6ded16b2010-05-10 14:33:55 +01004831 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00004832 // Check that no extension objects have been created by calls to
4833 // eval from the current scope to the global scope.
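  // Illustrative example (not part of the original source): in
  //
  //   function f(s) { eval(s); return x; }
  //
  // x is expected to be a global, but the eval may have introduced a
  // binding for it, so every context between here and the global context
  // must be checked for an extension object before the global load IC can
  // be used.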
  Register context = esi;
  Result tmp = allocator_->Allocate();
  ASSERT(tmp.is_valid());  // All non-reserved registers were available.

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      // Load next context in chain.
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(tmp.reg())) {
      __ mov(tmp.reg(), context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
    slow->Branch(not_equal, not_taken);
    // Load next context in chain.
    __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
    __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }
  tmp.Unuse();

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  // The register allocator prefers eax if it is free, so the code generator
  // will load the global object directly into eax, which is where the LoadIC
  // expects it.
  frame_->Spill(eax);
  LoadGlobal();
  frame_->Push(slot->var()->name());
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
                         ? RelocInfo::CODE_TARGET
                         : RelocInfo::CODE_TARGET_CONTEXT;
  Result answer = frame_->CallLoadIC(mode);
  // A test eax instruction following the call signals that the inobject
  // property case was inlined. Ensure that there is not a test eax
  // instruction here.
  __ nop();
  return answer;
}


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    Result* result,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
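  // Illustrative example (not part of the original source): in
  //
  //   function f(x, s) { eval(s); return x; }
  //
  // x normally lives in its parameter slot, but the eval could shadow it,
  // so the fast paths below verify the absence of context extensions
  // before using the slot directly.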
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    done->Jump(result);

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      // Allocate a fresh register to use as a temp in
      // ContextSlotOperandCheckExtensions and to hold the result
      // value.
      *result = allocator()->Allocate();
      ASSERT(result->is_valid());
      __ mov(result->reg(),
             ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ cmp(result->reg(), Factory::the_hole_value());
        done->Branch(not_equal, result);
        __ mov(result->reg(), Factory::undefined_value());
      }
      done->Jump(result);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          Result arguments = allocator()->Allocate();
          ASSERT(arguments.is_valid());
          __ mov(arguments.reg(),
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
                                                   arguments,
                                                   slow));
          frame_->Push(&arguments);
          frame_->Push(key_literal->handle());
          *result = EmitKeyedLoad();
          done->Jump(result);
        }
      }
    }
  }
}


void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call. Since the call is inevitable,
    // we eagerly sync the virtual frame so we can directly push the
    // arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);

    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(slot->var()->name()));

    Result value;
    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores attribute
      // (e.g. READ_ONLY) of context slot so that we can initialize const
      // properties (introduced via eval("const foo = (some expr);")). Also,
      // uses the current function context instead of the top context.
      //
      // Note that we must declare the foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the same
      // time, because the const declaration may be at the end of the eval
      // code (sigh...) and the const variable may have been used before
      // (where its value is 'undefined'). Thus, we can only do the
      // initialization when we actually encounter the expression and when
      // the expression operands are defined and valid, and thus we need the
      // split into 2 operations: declaration of the context slot followed
      // by initialization.
      value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack. This is necessary for compiling chained assignment
    // expressions.
    frame_->Push(&value);

  } else {
    ASSERT(!slot->var()->is_dynamic());

    JumpTarget exit;
    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value). When the assignment is executed,
      // the code is identical to a normal store (see below).
      //
      // We spill the frame in the code below because the direct-frame
      // access of SlotOperand is potentially unsafe with an unspilled
      // frame.
      VirtualFrame::SpilledScope spilled_scope;
      Comment cmnt(masm_, "[ Init const");
      __ mov(ecx, SlotOperand(slot, ecx));
      __ cmp(ecx, Factory::the_hole_value());
      exit.Branch(not_equal);
    }

    // We must execute the store. Storing a variable must keep the (new)
    // value on the stack. This is necessary for compiling assignment
    // expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will initialize
    // consts to 'the hole' value and by doing so, end up calling this code.
    if (slot->type() == Slot::PARAMETER) {
      frame_->StoreToParameterAt(slot->index());
    } else if (slot->type() == Slot::LOCAL) {
      frame_->StoreToLocalAt(slot->index());
    } else {
      // The other slot types (LOOKUP and GLOBAL) cannot reach here.
      //
      // The use of SlotOperand below is safe for an unspilled frame
      // because the slot is a context slot.
      ASSERT(slot->type() == Slot::CONTEXT);
      frame_->Dup();
      Result value = frame_->Pop();
      value.ToRegister();
      Result start = allocator_->Allocate();
      ASSERT(start.is_valid());
      __ mov(SlotOperand(slot, start.reg()), value.reg());
      // RecordWrite may destroy the value registers.
      //
      // TODO(204): Avoid actually spilling when the value is not
      // needed (probably the common case).
      frame_->Spill(value.reg());
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
      // The results start, value, and temp are unused by going out of
      // scope.
    }

    exit.Bind();
  }
}


void CodeGenerator::VisitSlot(Slot* slot) {
  Comment cmnt(masm_, "[ Slot");
  if (in_safe_int32_mode()) {
    if ((slot->type() == Slot::LOCAL && !slot->is_arguments())) {
      frame()->UntaggedPushLocalAt(slot->index());
    } else if (slot->type() == Slot::PARAMETER) {
      frame()->UntaggedPushParameterAt(slot->index());
    } else {
      UNREACHABLE();
    }
  } else {
    Result result = LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    frame()->Push(&result);
  }
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
  Comment cmnt(masm_, "[ VariableProxy");
  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    ASSERT(!in_safe_int32_mode());
    Reference ref(this, node);
    ref.GetValue();
  }
}


void CodeGenerator::VisitLiteral(Literal* node) {
  Comment cmnt(masm_, "[ Literal");
  if (in_safe_int32_mode()) {
    frame_->PushUntaggedElement(node->handle());
  } else {
    frame_->Push(node->handle());
  }
}


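// Explanatory note (added): an 'unsafe' smi is one whose payload does not
// fit in kMaxSmiInlinedBits (see IsUnsafeSmi below). Such values are
// written in two 16-bit halves, presumably so the full pointer-sized bit
// pattern never appears verbatim in the instruction stream where it could
// be mistaken for an embedded object reference. For example, with
// bits == 0x89abcdef the code below first pushes 0x0000cdef and then ORs
// in 0x89ab0000, leaving 0x89abcdef on the stack.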
void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ push(Immediate(bits & 0x0000FFFF));
  __ or_(Operand(esp, 0), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ mov(Operand(ebp, offset), Immediate(bits & 0x0000FFFF));
  __ or_(Operand(ebp, offset), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
  ASSERT(target.is_valid());
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ Set(target, Immediate(bits & 0x0000FFFF));
  __ or_(target, bits & 0xFFFF0000);
}


bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
  if (!value->IsSmi()) return false;
  int int_value = Smi::cast(*value)->value();
  return !is_intn(int_value, kMaxSmiInlinedBits);
}


// Materialize the regexp literal 'node' in the literals array
// 'literals' of the function. Leave the regexp boilerplate in
// 'boilerplate'.
class DeferredRegExpLiteral: public DeferredCode {
 public:
  DeferredRegExpLiteral(Register boilerplate,
                        Register literals,
                        RegExpLiteral* node)
      : boilerplate_(boilerplate), literals_(literals), node_(node) {
    set_comment("[ DeferredRegExpLiteral");
  }

  void Generate();

 private:
  Register boilerplate_;
  Register literals_;
  RegExpLiteral* node_;
};


void DeferredRegExpLiteral::Generate() {
  // Since the entry is undefined we call the runtime system to
  // compute the literal.
  // Literal array (0).
  __ push(literals_);
  // Literal index (1).
  __ push(Immediate(Smi::FromInt(node_->literal_index())));
  // RegExp pattern (2).
  __ push(Immediate(node_->pattern()));
  // RegExp flags (3).
  __ push(Immediate(node_->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
}


void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literals array and check the allocated entry. Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the RegExp object. If so,
  // jump to the deferred code passing the literals array.
  DeferredRegExpLiteral* deferred =
      new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
  __ cmp(boilerplate.reg(), Factory::undefined_value());
  deferred->Branch(equal);
  deferred->BindExit();
  literals.Unuse();

  // Push the boilerplate object.
  frame_->Push(&boilerplate);
}


void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ObjectLiteral");

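  // Illustrative example (not part of the original source): a literal like
  //
  //   { a: 1, b: g(), get c() { return 3; }, __proto__: p }
  //
  // exercises the CONSTANT, COMPUTED, GETTER, and PROTOTYPE cases in the
  // loop below; only the non-constant properties need generated code.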
  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant properties.
  frame_->Push(node->constant_properties());
  // Should the object literal have fast elements?
  frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->Push(&clone);

  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        Handle<Object> key(property->key()->handle());
        if (key->IsSymbol()) {
          // Duplicate the object as the IC receiver.
          frame_->Dup();
          Load(property->value());
          Result dummy = frame_->CallStoreIC(Handle<String>::cast(key), false);
          dummy.Unuse();
          break;
        }
        // Fall through
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}


void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ArrayLiteral");

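  // Illustrative example (not part of the original source): for
  //
  //   [1, 2, g()]
  //
  // the constant elements already live in the boilerplate; only the value
  // of g() is stored into the clone by the generated code below.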
  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  frame_->Push(&literals);
  frame_->Push(Smi::FromInt(node->literal_index()));
  frame_->Push(node->constant_elements());
  int length = node->values()->length();
  Result clone;
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
    clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    FastCloneShallowArrayStub stub(length);
    clone = frame_->CallStub(&stub, 3);
  }
  frame_->Push(&clone);

  // Generate code to set the elements in the array that are not
  // literals.
  for (int i = 0; i < length; i++) {
    Expression* value = node->values()->at(i);

    // If value is a literal the property value is already set in the
    // boilerplate object.
    if (value->AsLiteral() != NULL) continue;
    // If value is a materialized literal the property value is already set
    // in the boilerplate object if it is simple.
    if (CompileTimeValue::IsCompileTimeValue(value)) continue;

    // The property must be set by generated code.
    Load(value);

    // Get the property value off the stack.
    Result prop_value = frame_->Pop();
    prop_value.ToRegister();

    // Fetch the array literal while leaving a copy on the stack and
    // use it to get the elements array.
    frame_->Dup();
    Result elements = frame_->Pop();
    elements.ToRegister();
    frame_->Spill(elements.reg());
    // Get the elements array.
    __ mov(elements.reg(),
           FieldOperand(elements.reg(), JSObject::kElementsOffset));

    // Write to the indexed properties array.
    int offset = i * kPointerSize + FixedArray::kHeaderSize;
    __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());

    // Update the write barrier for the array address.
    frame_->Spill(prop_value.reg());  // Overwritten by the write barrier.
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_valid());
    __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
  }
}


void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
  ASSERT(!in_safe_int32_mode());
  ASSERT(!in_spilled_code());
  // Call runtime routine to allocate the catch extension object and
  // assign the exception value to the catch variable.
  Comment cmnt(masm_, "[ CatchExtensionObject");
  Load(node->key());
  Load(node->value());
  Result result =
      frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
  frame_->Push(&result);
}


void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->slot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
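    // Illustrative example (not part of the original source): for x += e
    // the current value of x is pushed, e is loaded, and the addition is
    // performed before the store below.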
    Result result = LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    frame()->Push(&result);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    // Construct the implicit binary operation.
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary. If
  // the receiver is trivial it is not placed on the stack at this point, but
  // loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
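  // Illustrative example (not part of the original source): a run of
  // assignments such as
  //
  //   this.a = ...; this.b = ...; this.c = ...;
  //
  // is treated as an initialization block, so the receiver is switched to
  // slow-mode properties once up front and back to fast mode at the end.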
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so this is never an assignment to a variable; there
    // must be a receiver object.
Andrei Popescu402d9372010-02-26 13:31:12 +00005454 ASSERT_EQ(NULL, var);
Andrei Popescu402d9372010-02-26 13:31:12 +00005455 if (is_trivial_receiver) {
5456 frame()->Push(prop->obj());
5457 } else {
5458 frame()->Dup();
5459 }
5460 Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
5461 }

  // Change to fast case at the end of an initialization block. To prepare
  // for that, add an extra copy of the receiver to the frame, so that it
  // can be converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame()->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else if (var != NULL) {
      // The LoadIC stub expects the object in eax.
      // Freeing eax causes the code generator to load the global into it.
      frame_->Spill(eax);
      LoadGlobal();
    } else {
      frame()->Dup();
    }
    Result value = EmitNamedLoad(name, var != NULL);
    frame()->Push(&value);
    Load(node->value());

    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    // Construct the implicit binary operation.
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }
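  // Hypothetical example: for a compound assignment like 'o.x += 1' the
  // code above leaves the current value of o.x and the constant 1 on the
  // frame and combines them with an implicit ADD; a plain 'o.x = 1' only
  // loads the 1.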

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment.  It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    Result value = frame()->Pop();
    frame()->Push(prop->obj());
    frame()->Push(&value);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  Result answer = EmitNamedStore(name, is_contextual);
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      frame()->Push(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Result result = frame()->Pop();
      Result receiver = frame()->Pop();
      frame()->Push(&result);
      frame()->Push(&receiver);
    }
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(frame()->height(), original_height + 1);
}


void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare
  // for that, add an extra copy of the receiver to the frame, so that it
  // can be converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame()->PushElementAt(1);
    frame()->PushElementAt(1);
    Result value = EmitKeyedLoad();
    frame()->Push(&value);
    Load(node->value());

    // Perform the binary operation.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    BinaryOperation expr(node, node->binary_op(), node->target(),
                         node->value());
    GenericBinaryOperation(&expr,
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }
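  // Hypothetical example: 'a[i] *= 2' duplicates the receiver and key,
  // loads the current element through the keyed load above, and folds in
  // the multiplication before the keyed store below.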

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment.  It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  Result answer = EmitKeyedStore(prop->key()->type());
  frame()->Push(&answer);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment.  Swap the receiver and
    // the value of the assignment expression.
    Result result = frame()->Pop();
    Result receiver = frame()->Pop();
    frame()->Push(&result);
    frame()->Push(&receiver);
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitAssignment(Assignment* node) {
  ASSERT(!in_safe_int32_mode());
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references.  We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame()->Push(&result);
  }

  ASSERT(frame()->height() == original_height + 1);
}


void CodeGenerator::VisitThrow(Throw* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Throw");
  Load(node->exception());
  Result result = frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->Push(&result);
}


void CodeGenerator::VisitProperty(Property* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Property");
  Reference property(this, node);
  property.GetValue();
}


void CodeGenerator::VisitCall(Call* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated.  The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call.  Then we call the resolved function using the given
    // arguments.

    // Prepare the stack for the call to the resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->Push(Factory::undefined_value());
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval.
    frame_->PushElementAt(arg_count + 1);
    if (arg_count > 0) {
      frame_->PushElementAt(arg_count);
    } else {
      frame_->Push(Factory::undefined_value());
    }

    // Push the receiver.
    frame_->PushParameterAt(-1);

    // Resolve the call.
    Result result =
        frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);

    // The runtime call returns a pair of values in eax (function) and
    // edx (receiver).  Touch up the stack with the right values.
    Result receiver = allocator_->Allocate(edx);
    frame_->SetElementAt(arg_count + 1, &result);
    frame_->SetElementAt(arg_count, &receiver);
    receiver.Unuse();

    // Call the function.
    CodeForSourcePosition(node->position());
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    result = frame_->CallStub(&call_function, arg_count + 1);

    // Restore the context and overwrite the function on the stack with
    // the result.
    frame_->RestoreContextRegister();
    frame_->SetElementAt(0, &result);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------

    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    // Push the name of the function onto the frame.
    frame_->Push(var->name());

    // Call the IC initialization code.
    CodeForSourcePosition(node->position());
    Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
                                       arg_count,
                                       loop_nesting());
    frame_->RestoreContextRegister();
    frame_->Push(&result);

  } else if (var != NULL && var->slot() != NULL &&
             var->slot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;
    Result function;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->slot(),
                                    NOT_INSIDE_TYPEOF,
                                    &function,
                                    &slow,
                                    &done);

    slow.Bind();
    // Enter the runtime system to load the function from the context.
    // Sync the frame so we can push the arguments directly into
    // place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // The runtime call returns a pair of values in eax and edx.  The
    // looked-up function is in eax and the receiver is in edx.  These
    // register references are not ref counted here.  We spill them
    // eagerly since they are arguments to an inevitable call (and are
    // not sharable by the arguments).
    ASSERT(!allocator()->is_used(eax));
    frame_->EmitPush(eax);

    // Load the receiver.
    ASSERT(!allocator()->is_used(edx));
    frame_->EmitPush(edx);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind(&function);
      frame_->Push(&function);
      LoadGlobalReceiver();
      call.Bind();
    }

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        // Push the receiver onto the frame.
        Load(property->obj());

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
        }

        // Push the name of the function onto the frame.
        frame_->Push(name);

        // Call the IC initialization code.
        CodeForSourcePosition(node->position());
        Result result =
            frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
                               loop_nesting());
        frame_->RestoreContextRegister();
        frame_->Push(&result);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the function to call from the property through a reference.

      // Pass the receiver to the called function.
      if (property->is_synthetic()) {
        Reference ref(this, property);
        ref.GetValue();
        // Use the global object as receiver.
        LoadGlobalReceiver();
      } else {
        Load(property->obj());
        frame()->Dup();
        Load(property->key());
        Result function = EmitKeyedLoad();
        Result receiver = frame_->Pop();
        frame_->Push(&function);
        frame_->Push(&receiver);
      }

      // Call the function.
      CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver();

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
  }
}


void CodeGenerator::VisitCallNew(CallNew* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.  This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Compute the function to call and use the global object as the
  // receiver.  There is no need to use the global proxy here because
  // it will always be replaced with a newly allocated object.
  Load(node->expression());
  LoadGlobal();

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Result result = frame_->CallConstructor(arg_count);
  // Replace the function on the stack with the result.
  frame_->SetElementAt(0, &result);
}


void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  value.Unuse();
  destination()->Split(zero);
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string.  Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  // Finally, we're expected to leave a value on the top of the stack.
  frame_->Push(Factory::undefined_value());
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
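  // A smi is a 31-bit signed value shifted left by one bit, so a
  // non-negative smi has both the tag bit (bit 0) and the sign bit
  // (bit 31) clear.  E.g. 5 is encoded as 0x0000000a, while -5 is
  // 0xfffffff6 with the sign bit set; the combined mask tests both.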
  __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
  value.Unuse();
  destination()->Split(zero);
}


// This generates code that performs a charCodeAt() call or returns
// undefined in order to trigger the slow case, Runtime_StringCharCodeAt.
// It can handle flat, 8- and 16-bit characters, and cons strings where the
// answer is found in the left-hand branch of the cons.  The slow case will
// flatten the string, which will ensure that the answer is in the left-hand
// side the next time around.
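// For example, "abc".charCodeAt(1) yields 98 ('b') on the fast path; a
// cons string whose answer lies in the right branch falls through to the
// runtime, which flattens the string before answering.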
void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateFastCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));
  Result index = frame_->Pop();
  Result object = frame_->Pop();

  // We will mutate the index register and possibly the object register.
  // The case where they are somehow the same register is handled
  // because we only mutate them in the case where the receiver is a
  // heap object and the index is not.
  object.ToRegister();
  index.ToRegister();
  frame_->Spill(object.reg());
  frame_->Spill(index.reg());

  // We need two extra registers.
  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());
  Result scratch = allocator()->Allocate();
  ASSERT(scratch.is_valid());

  // There is no virtual frame effect from here up to the final result
  // push.
  Label slow_case;
  Label exit;
  StringHelper::GenerateFastCharCodeAt(masm_,
                                       object.reg(),
                                       index.reg(),
                                       scratch.reg(),
                                       result.reg(),
                                       &slow_case,
                                       &slow_case,
                                       &slow_case,
                                       &slow_case);
  __ jmp(&exit);

  __ bind(&slow_case);
  // Move the undefined value into the result register, which will
  // trigger the slow case.
  __ Set(result.reg(), Immediate(Factory::undefined_value()));

  __ bind(&exit);
  frame_->Push(&result);
}


void CodeGenerator::GenerateCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Result code = frame_->Pop();
  code.ToRegister();
  ASSERT(code.is_valid());

  // StringHelper::GenerateCharFromCode may do a runtime call.
  frame_->SpillAll();

  Result result = allocator()->Allocate();
  ASSERT(result.is_valid());

  StringHelper::GenerateCharFromCode(masm_,
                                     code.reg(),
                                     result.reg(),
                                     CALL_FUNCTION);
  frame_->Push(&result);
}


void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get the map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a JS array or not.
  __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result value = frame_->Pop();
  value.ToRegister();
  ASSERT(value.is_valid());
  __ test(value.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(equal);
  // It is a heap object - get the map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // Check if the object is a regexp.
  __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
  value.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();

  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  __ cmp(obj.reg(), Factory::null_value());
  destination()->true_target()->Branch(equal);

  Result map = allocator()->Allocate();
  ASSERT(map.is_valid());
  __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset));
  __ test(map.reg(), Immediate(1 << Map::kIsUndetectable));
  destination()->false_target()->Branch(not_zero);
  __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
  __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
  destination()->false_target()->Branch(less);
  __ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
  obj.Unuse();
  map.Unuse();
  destination()->Split(less_equal);
}


void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (%_ClassOf(arg) === 'Function')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
  obj.Unuse();
  temp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Result obj = frame_->Pop();
  obj.ToRegister();
  __ test(obj.reg(), Immediate(kSmiTagMask));
  destination()->false_target()->Branch(zero);
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ mov(temp.reg(),
         FieldOperand(obj.reg(), HeapObject::kMapOffset));
  __ movzx_b(temp.reg(),
             FieldOperand(temp.reg(), Map::kBitFieldOffset));
  __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
  obj.Unuse();
  temp.Unuse();
  destination()->Split(not_zero);
}


void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
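  // E.g. inside 'function f() { return %_IsConstructCall(); }' the
  // intrinsic answers true for 'new f()' and false for a plain call 'f()'.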

  // Get the frame pointer for the calling frame.
  Result fp = allocator()->Allocate();
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  fp.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Result fp = allocator_->Allocate();
  Result result = allocator_->Allocate();
  ASSERT(fp.is_valid() && result.is_valid());

  Label exit;

  // Get the number of formal parameters.
  __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
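  // E.g. for 'function f(a) {}' called as 'f(1, 2, 3)' the adaptor frame
  // reports 3, while the formal parameter count loaded above is 1.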
  __ mov(result.reg(),
         Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  result.set_type_info(TypeInfo::Smi());
  if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
  frame_->Push(&result);
}


void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
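  // E.g. %_ClassOf([]) is 'Array', %_ClassOf(/x/) is 'RegExp', and
  // %_ClassOf(function() {}) is 'Function'; smis and other non-JS objects
  // yield null, and objects with a non-function constructor yield 'Object'.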
  JumpTarget leave, null, function, non_function_constructor;
  Load(args->at(0));  // Load the object.
  Result obj = frame_->Pop();
  obj.ToRegister();
  frame_->Spill(obj.reg());

  // If the object is a smi, we return null.
  __ test(obj.reg(), Immediate(kSmiTagMask));
  null.Branch(zero);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  { Result tmp = allocator()->Allocate();
    __ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
    __ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset));
    __ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE);
    null.Branch(less);

    // As long as JS_FUNCTION_TYPE is the last instance type and it is
    // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
    // LAST_JS_OBJECT_TYPE.
    ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
    ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
    __ cmp(tmp.reg(), JS_FUNCTION_TYPE);
    function.Branch(equal);
  }

  // Check if the constructor in the map is a function.
  { Result tmp = allocator()->Allocate();
    __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
    __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
    non_function_constructor.Branch(not_equal);
  }

  // The map register now contains the constructor function.  Grab the
  // instance class name from there.
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
  __ mov(obj.reg(),
         FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->Push(&obj);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  frame_->Push(Factory::function_class_symbol());
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  frame_->Push(Factory::Object_symbol());
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  frame_->Push(Factory::null_value());

  // All done.
  leave.Bind();
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  frame_->Dup();
  Result object = frame_->Pop();
  object.ToRegister();
  ASSERT(object.is_valid());
  // if (object->IsSmi()) return object.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, taken);
  // It is a heap object - get the map.
  Result temp = allocator()->Allocate();
  ASSERT(temp.is_valid());
  // if (!object->IsJSValue()) return object.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
  leave.Branch(not_equal, not_taken);
  __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
  object.Unuse();
  frame_->SetElementAt(0, &temp);
  leave.Bind();
}


void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  JumpTarget leave;
  Load(args->at(0));  // Load the object.
  Load(args->at(1));  // Load the value.
  Result value = frame_->Pop();
  Result object = frame_->Pop();
  value.ToRegister();
  object.ToRegister();

  // if (object->IsSmi()) return value.
  __ test(object.reg(), Immediate(kSmiTagMask));
  leave.Branch(zero, &value, taken);

  // It is a heap object - get its map.
  Result scratch = allocator_->Allocate();
  ASSERT(scratch.is_valid());
  // if (!object->IsJSValue()) return value.
  __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
  leave.Branch(not_equal, &value, not_taken);

  // Store the value.
  __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  Result duplicate_value = allocator_->Allocate();
  ASSERT(duplicate_value.is_valid());
  __ mov(duplicate_value.reg(), value.reg());
  // The object register is also overwritten by the write barrier and
  // possibly aliased in the frame.
  frame_->Spill(object.reg());
  __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
                 scratch.reg());
  object.Unuse();
  scratch.Unuse();
  duplicate_value.Unuse();

  // Leave.
  leave.Bind(&value);
  frame_->Push(&value);
}


void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  Load(args->at(0));
  Result key = frame_->Pop();
  // Explicitly create a constant result.
  Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  Result result = frame_->CallStub(&stub, &key, &count);
  frame_->Push(&result);
}


void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Result right = frame_->Pop();
  Result left = frame_->Pop();
  right.ToRegister();
  left.ToRegister();
  __ cmp(right.reg(), Operand(left.reg()));
  right.Unuse();
  left.Unuse();
  destination()->Split(equal);
}


void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  ASSERT(kSmiTag == 0);  // EBP value is aligned, so it should look like a smi.
  Result ebp_as_smi = allocator_->Allocate();
  ASSERT(ebp_as_smi.is_valid());
  __ mov(ebp_as_smi.reg(), Operand(ebp));
  frame_->Push(&ebp_as_smi);
}


void CodeGenerator::GenerateRandomHeapNumber(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);
  frame_->SpillAll();

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // To allocate a heap number, and ensure that it is not a smi, we
  // call the runtime function Runtime::kNumberUnaryMinus on 0, returning
  // the double -0.0.  A new, distinct heap number is returned each time.
  __ push(Immediate(Smi::FromInt(0)));
  __ CallRuntime(Runtime::kNumberUnaryMinus, 1);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(0, ebx);
  __ CallCFunction(ExternalReference::random_uint32_function(), 0);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  //   (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
  // This is implemented on both SSE2 and FPU.
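  // For example, random bits r = 0x89abcdef occupy the low 32 bits of the
  // mantissa, so the constructed double equals 2^20 + r/2^32; subtracting
  // 2^20 leaves r/2^32, a value uniformly distributed in [0, 1).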
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope fscope(SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, Operand(ebx));
    __ movd(xmm0, Operand(eax));
    __ cvtss2sd(xmm1, xmm1);
    __ pxor(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  __ mov(eax, edi);

  Result result = allocator_->Allocate(eax);
  frame_->Push(&result);
}


void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  SubStringStub stub;
  Result answer = frame_->CallStub(&stub, 3);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringCompareStub stub;
  Result answer = frame_->CallStub(&stub, 2);
  frame_->Push(&answer);
}


void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
  ASSERT_EQ(4, args->length());

  // Load the arguments on the stack and call the stub.
  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));
  Load(args->at(3));
  RegExpExecStub stub;
  Result result = frame_->CallStub(&stub, 4);
  frame_->Push(&result);
}


void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
  // No stub.  This code only occurs a few times in regexp.js.
  const int kMaxInlineLength = 100;
  ASSERT_EQ(3, args->length());
  Load(args->at(0));  // Size of array, smi.
  Load(args->at(1));  // "index" property value.
  Load(args->at(2));  // "input" property value.
  {
    VirtualFrame::SpilledScope spilled_scope;

    Label slowcase;
    Label done;
    __ mov(ebx, Operand(esp, kPointerSize * 2));
    __ test(ebx, Immediate(kSmiTagMask));
    __ j(not_zero, &slowcase);
    __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
    __ j(above, &slowcase);
    // Smi-tagging is equivalent to multiplying by 2.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    // Allocate RegExpResult followed by FixedArray with size in ebx.
    // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
    // Elements:  [Map][Length][..elements..]
    __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
                          times_half_pointer_size,
                          ebx,  // In: Number of elements (times 2, being a smi)
                          eax,  // Out: Start of allocation (tagged).
                          ecx,  // Out: End of allocation.
                          edx,  // Scratch register
                          &slowcase,
                          TAG_OBJECT);
    // eax: Start of allocated area, object-tagged.

    // Set JSArray map to global.regexp_result_map().
    // Set empty properties FixedArray.
    // Set elements to point to FixedArray allocated right after the JSArray.
    // Interleave operations for better latency.
    __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
    __ mov(ecx, Immediate(Factory::empty_fixed_array()));
    __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
    __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
    __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
    __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
    __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);

    // Set input, index and length fields from arguments.
    __ pop(FieldOperand(eax, JSRegExpResult::kInputOffset));
    __ pop(FieldOperand(eax, JSRegExpResult::kIndexOffset));
    __ pop(ecx);
    __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);

    // Fill out the elements FixedArray.
    // eax: JSArray.
    // ebx: FixedArray.
    // ecx: Number of elements in array, as smi.

    // Set map.
    __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    // Set length.
    __ SmiUntag(ecx);
    __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
    // Fill contents of the fixed array with the hole.
    __ mov(edx, Immediate(Factory::the_hole_value()));
    __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
    // Fill fixed array elements with the hole.
    // eax: JSArray.
    // ecx: Number of elements to fill.
    // ebx: Start of elements in FixedArray.
    // edx: the hole.
    Label loop;
    __ test(ecx, Operand(ecx));
    __ bind(&loop);
    __ j(less_equal, &done);  // Jump if ecx is negative or zero.
    __ sub(Operand(ecx), Immediate(1));
    __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
    __ jmp(&loop);

    __ bind(&slowcase);
    __ CallRuntime(Runtime::kRegExpConstructResult, 3);

    __ bind(&done);
  }
  frame_->Forget(3);
  frame_->Push(eax);
}


class DeferredSearchCache: public DeferredCode {
 public:
  DeferredSearchCache(Register dst, Register cache, Register key)
      : dst_(dst), cache_(cache), key_(key) {
    set_comment("[ DeferredSearchCache");
  }

  virtual void Generate();

 private:
  Register dst_;    // On invocation, holds the Smi index of the finger;
                    // on exit, holds the value being looked up.
  Register cache_;  // Instance of JSFunctionResultCache.
  Register key_;    // The key being looked up.
};


void DeferredSearchCache::Generate() {
  Label first_loop, search_further, second_loop, cache_miss;

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
  Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);
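  // Assumed cache layout, inferred from the constants used here: the cache
  // is a FixedArray of the form
  //
  //   [factory][finger][size][key 0][value 0][key 1][value 1]...
  //
  // where fixed-size (key, value) entries start at kEntriesIndex and the
  // finger marks the most recently hit entry.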

  // Check the cache from the finger to the start of the cache.
  __ bind(&first_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
  __ j(less, &search_further);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &first_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&search_further);

  // Check the cache from the end of the cache up to the finger.
  __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));

  __ bind(&second_loop);
  __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
  // Consider prefetching into some reg.
  __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
  __ j(less_equal, &cache_miss);

  __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
  __ j(not_equal, &second_loop);

  __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
  __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
  __ jmp(exit_label());

  __ bind(&cache_miss);
  __ push(cache_);  // Store a reference to the cache.
  __ push(key_);  // Store the key.
  Handle<Object> receiver(Top::global_context()->global());
  __ push(Immediate(receiver));
  __ push(key_);
  // On ia32 the function must be in edi.
  __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
  ParameterCount expected(1);
  __ InvokeFunction(edi, expected, CALL_FUNCTION);

  // Find a place to put the new cached value.
  Label add_new_entry, update_cache;
  __ mov(ecx, Operand(esp, kPointerSize));  // Restore the cache.
  // Possible optimization: the cache size is constant for the given cache
  // so technically we could use a constant here.  However, if we have a
  // cache miss this optimization would hardly matter much.

  // Check if we could add a new entry to the cache.
  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ SmiTag(ebx);
  __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ j(greater, &add_new_entry);

  // Check if we could evict the entry after the finger.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(kEntrySizeSmi));
  __ cmp(ebx, Operand(edx));
  __ j(greater, &update_cache);

  // Need to wrap over the cache.
  __ mov(edx, Immediate(kEntriesIndexSmi));
  __ jmp(&update_cache);

  __ bind(&add_new_entry);
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
  __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);

  // Update the cache itself.
  // edx holds the index.
  __ bind(&update_cache);
  __ pop(ebx);  // Restore the key.
  __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
  // Store the key.
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  // Store the value.
  __ pop(ecx);  // Restore the cache.
  __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
  __ add(Operand(edx), Immediate(Smi::FromInt(1)));
  __ mov(ebx, eax);
  __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
  __ RecordWrite(ecx, 0, ebx, edx);

  if (!dst_.is(eax)) {
    __ mov(dst_, eax);
  }
}


void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    frame_->Push(Factory::undefined_value());
    return;
  }

  Load(args->at(1));
  Result key = frame_->Pop();
  key.ToRegister();

  Result cache = allocator()->Allocate();
  ASSERT(cache.is_valid());
  __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
  __ mov(cache.reg(),
         ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache.reg(),
         FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));

  Result tmp = allocator()->Allocate();
  ASSERT(tmp.is_valid());

  DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
                                                          cache.reg(),
                                                          key.reg());

  // tmp.reg() now holds the finger offset as a smi.
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp.reg(), FieldOperand(cache.reg(),
                                 JSFunctionResultCache::kFingerOffset));
  __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
  deferred->Branch(not_equal);

  __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));

  deferred->BindExit();
  frame_->Push(&tmp);
}


void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  Load(args->at(0));
  NumberToStringStub stub;
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


class DeferredSwapElements: public DeferredCode {
 public:
  DeferredSwapElements(Register object, Register index1, Register index2)
      : object_(object), index1_(index1), index2_(index2) {
    set_comment("[ DeferredSwapElements");
  }

  virtual void Generate();

 private:
  Register object_, index1_, index2_;
};


void DeferredSwapElements::Generate() {
  __ push(object_);
  __ push(index1_);
  __ push(index2_);
  __ CallRuntime(Runtime::kSwapElements, 3);
}


void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
  // Note: this code assumes that the indices passed are within the
  // elements' bounds and refer to valid (not hole) values.
  Comment cmnt(masm_, "[ GenerateSwapElements");

  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  Result index2 = frame_->Pop();
  index2.ToRegister();

  Result index1 = frame_->Pop();
  index1.ToRegister();

  Result object = frame_->Pop();
  object.ToRegister();

  Result tmp1 = allocator()->Allocate();
  tmp1.ToRegister();
  Result tmp2 = allocator()->Allocate();
  tmp2.ToRegister();

  frame_->Spill(object.reg());
  frame_->Spill(index1.reg());
  frame_->Spill(index2.reg());

  DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
                                                            index1.reg(),
                                                            index2.reg());

  // Fetch the map and check if the array is in fast case.
  // Check that the object doesn't require security checks and
  // has no indexed interceptor.
  __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
  deferred->Branch(less);
  __ movzx_b(tmp1.reg(), FieldOperand(tmp1.reg(), Map::kBitFieldOffset));
  __ test(tmp1.reg(), Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
  deferred->Branch(not_zero);

  // Check that the object's elements are in fast case.
  __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
  __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  deferred->Branch(not_equal);

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  // Check that both indices are smis.
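  // ORing the two words makes a single tag-bit test cover both: a heap
  // object pointer has bit 0 set, so if either index is not a smi the OR
  // is not smi-tagged either.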
6885 __ mov(tmp2.reg(), index1.reg());
6886 __ or_(tmp2.reg(), Operand(index2.reg()));
6887 __ test(tmp2.reg(), Immediate(kSmiTagMask));
6888 deferred->Branch(not_zero);
6889
6890 // Bring addresses into index1 and index2.
Kristian Monsen25f61362010-05-21 11:50:48 +01006891 __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
6892 __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01006893
6894 // Swap elements.
6895 __ mov(object.reg(), Operand(index1.reg(), 0));
6896 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
6897 __ mov(Operand(index2.reg(), 0), object.reg());
6898 __ mov(Operand(index1.reg(), 0), tmp2.reg());
6899
6900 Label done;
6901 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
6902 // Possible optimization: do a check that both values are Smis
6903 // (or them and test against Smi mask.)
6904
6905 __ mov(tmp2.reg(), tmp1.reg());
6906 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg());
6907 __ CallStub(&recordWrite1);
6908
6909 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg());
6910 __ CallStub(&recordWrite2);
6911
6912 __ bind(&done);
6913
6914 deferred->BindExit();
6915 frame_->Push(Factory::undefined_value());
6916}
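
// Editor's sketch (an illustration, not code used by the generator): the
// fast path above amounts to the following swap over the raw element store,
// with the write-barrier stubs run afterwards only when the elements array
// is outside new space:
//
//   void SwapFixedArrayElements(Object** elements, int i, int j) {
//     Object* tmp = elements[i];
//     elements[i] = elements[j];
//     elements[j] = tmp;
//   }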
6917
6918
6919void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
6920 Comment cmnt(masm_, "[ GenerateCallFunction");
6921
6922 ASSERT(args->length() >= 2);
6923
6924 int n_args = args->length() - 2; // for receiver and function.
6925 Load(args->at(0)); // receiver
6926 for (int i = 0; i < n_args; i++) {
6927 Load(args->at(i + 1));
6928 }
6929 Load(args->at(n_args + 1)); // function
6930 Result result = frame_->CallJSFunction(n_args);
6931 frame_->Push(&result);
6932}
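
// For example, a call site of the form %_CallFunction(recv, a, b, f)
// (names hypothetical) loads recv, then a and b, then f, so the function
// ends up on top of the frame where CallJSFunction expects it.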
6933
6934
6935// Generates the Math.pow method. Only handles special cases and
6936// branches to the runtime system for everything else. Please note
6937// that this function assumes that the callsite has executed ToNumber
6938// on both arguments.
6939void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
6940 ASSERT(args->length() == 2);
6941 Load(args->at(0));
6942 Load(args->at(1));
6943 if (!CpuFeatures::IsSupported(SSE2)) {
6944 Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
6945 frame_->Push(&res);
6946 } else {
6947 CpuFeatures::Scope use_sse2(SSE2);
6948 Label allocate_return;
6949 // Load the two operands while leaving the values on the frame.
6950 frame()->Dup();
6951 Result exponent = frame()->Pop();
6952 exponent.ToRegister();
6953 frame()->Spill(exponent.reg());
6954 frame()->PushElementAt(1);
6955 Result base = frame()->Pop();
6956 base.ToRegister();
6957 frame()->Spill(base.reg());
6958
6959 Result answer = allocator()->Allocate();
6960 ASSERT(answer.is_valid());
6961 ASSERT(!exponent.reg().is(base.reg()));
6962 JumpTarget call_runtime;
6963
6964 // Save 1 in xmm3 - we need this several times later on.
6965 __ mov(answer.reg(), Immediate(1));
6966 __ cvtsi2sd(xmm3, Operand(answer.reg()));
6967
6968 Label exponent_nonsmi;
6969 Label base_nonsmi;
6970 // If the exponent is a heap number go to that specific case.
6971 __ test(exponent.reg(), Immediate(kSmiTagMask));
6972 __ j(not_zero, &exponent_nonsmi);
6973 __ test(base.reg(), Immediate(kSmiTagMask));
6974 __ j(not_zero, &base_nonsmi);
6975
6976 // Optimized version when y is an integer.
6977 Label powi;
6978 __ SmiUntag(base.reg());
6979 __ cvtsi2sd(xmm0, Operand(base.reg()));
6980 __ jmp(&powi);
6981 // The exponent is a smi and the base is a heap number.
6982 __ bind(&base_nonsmi);
6983 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
6984 Factory::heap_number_map());
6985 call_runtime.Branch(not_equal);
6986
6987 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
6988
6989 // Optimized version of pow if y is an integer.
6990 __ bind(&powi);
6991 __ SmiUntag(exponent.reg());
6992
6993 // Save exponent in base as we need to check if exponent is negative later.
6994 // We know that base and exponent are in different registers.
6995 __ mov(base.reg(), exponent.reg());
6996
6997 // Get absolute value of exponent.
6998 Label no_neg;
6999 __ cmp(exponent.reg(), 0);
7000 __ j(greater_equal, &no_neg);
7001 __ neg(exponent.reg());
7002 __ bind(&no_neg);
7003
7004 // Load xmm1 with 1.
7005 __ movsd(xmm1, xmm3);
7006 Label while_true;
7007 Label no_multiply;
7008
7009 __ bind(&while_true);
7010 __ shr(exponent.reg(), 1);
7011 __ j(not_carry, &no_multiply);
7012 __ mulsd(xmm1, xmm0);
7013 __ bind(&no_multiply);
7014 __ test(exponent.reg(), Operand(exponent.reg()));
7015 __ mulsd(xmm0, xmm0);
7016 __ j(not_zero, &while_true);
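
 // Editor's sketch (plain C++) of the square-and-multiply loop above,
 // assuming a non-negative integer exponent e and base b:
 //
 //   double result = 1.0;          // xmm1
 //   while (e != 0) {
 //     if (e & 1) result *= b;     // multiply when the shifted-out bit is 1
 //     e >>= 1;                    // shr exponent.reg(), 1
 //     b *= b;                     // mulsd(xmm0, xmm0)
 //   }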
7017
7018 // base.reg() holds the original value of y - if y is negative, return 1/result.
7019 __ test(base.reg(), Operand(base.reg()));
7020 __ j(positive, &allocate_return);
7021 // Special case if xmm1 has reached infinity.
7022 __ mov(answer.reg(), Immediate(0x7FB00000));
7023 __ movd(xmm0, Operand(answer.reg()));
7024 __ cvtss2sd(xmm0, xmm0);
7025 __ ucomisd(xmm0, xmm1);
7026 call_runtime.Branch(equal);
7027 __ divsd(xmm3, xmm1);
7028 __ movsd(xmm1, xmm3);
7029 __ jmp(&allocate_return);
7030
7031 // The exponent (or both operands) is a heap number - from here on we
7032 // work on doubles.
7033 __ bind(&exponent_nonsmi);
7034 __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
7035 Factory::heap_number_map());
7036 call_runtime.Branch(not_equal);
7037 __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
7038 // Test if the exponent is NaN.
7039 __ ucomisd(xmm1, xmm1);
7040 call_runtime.Branch(parity_even);
7041
7042 Label base_not_smi;
7043 Label handle_special_cases;
7044 __ test(base.reg(), Immediate(kSmiTagMask));
7045 __ j(not_zero, &base_not_smi);
7046 __ SmiUntag(base.reg());
7047 __ cvtsi2sd(xmm0, Operand(base.reg()));
7048 __ jmp(&handle_special_cases);
7049 __ bind(&base_not_smi);
7050 __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
7051 Factory::heap_number_map());
7052 call_runtime.Branch(not_equal);
7053 __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
7054 __ and_(answer.reg(), HeapNumber::kExponentMask);
7055 __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
7056 // base is NaN or +/-Infinity
7057 call_runtime.Branch(greater_equal);
7058 __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));
7059
7060 // base is in xmm0 and exponent is in xmm1.
7061 __ bind(&handle_special_cases);
7062 Label not_minus_half;
7063 // Test for -0.5.
7064 // Load xmm2 with -0.5.
7065 __ mov(answer.reg(), Immediate(0xBF000000));
7066 __ movd(xmm2, Operand(answer.reg()));
7067 __ cvtss2sd(xmm2, xmm2);
7068 // xmm2 now has -0.5.
7069 __ ucomisd(xmm2, xmm1);
7070 __ j(not_equal, &not_minus_half);
7071
7072 // Calculate the reciprocal of the square root.
7073 // Note that 1/sqrt(x) = sqrt(1/x).
7074 __ divsd(xmm3, xmm0);
7075 __ movsd(xmm1, xmm3);
7076 __ sqrtsd(xmm1, xmm1);
7077 __ jmp(&allocate_return);
7078
7079 // Test for 0.5.
7080 __ bind(&not_minus_half);
7081 // Load xmm2 with 0.5.
7082 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
7083 __ addsd(xmm2, xmm3);
7084 // xmm2 now has 0.5.
7085 __ comisd(xmm2, xmm1);
7086 call_runtime.Branch(not_equal);
7087 // Calculates square root.
7088 __ movsd(xmm1, xmm0);
7089 __ sqrtsd(xmm1, xmm1);
7090
7091 JumpTarget done;
7092 Label failure, success;
7093 __ bind(&allocate_return);
7094 // Make a copy of the frame to enable us to handle allocation
7095 // failure after the JumpTarget jump.
7096 VirtualFrame* clone = new VirtualFrame(frame());
7097 __ AllocateHeapNumber(answer.reg(), exponent.reg(),
7098 base.reg(), &failure);
7099 __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
7100 // Remove the two original values from the frame - we only need those
7101 // in the case where we branch to runtime.
7102 frame()->Drop(2);
7103 exponent.Unuse();
7104 base.Unuse();
7105 done.Jump(&answer);
7106 // Use the copy of the original frame as our current frame.
7107 RegisterFile empty_regs;
7108 SetFrame(clone, &empty_regs);
7109 // If we experience an allocation failure we branch to runtime.
7110 __ bind(&failure);
7111 call_runtime.Bind();
7112 answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);
7113
7114 done.Bind(&answer);
7115 frame()->Push(&answer);
7116 }
7117}
7118
7119
Andrei Popescu402d9372010-02-26 13:31:12 +00007120void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
7121 ASSERT_EQ(args->length(), 1);
7122 Load(args->at(0));
7123 TranscendentalCacheStub stub(TranscendentalCache::SIN);
7124 Result result = frame_->CallStub(&stub, 1);
7125 frame_->Push(&result);
7126}
7127
7128
7129void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
7130 ASSERT_EQ(args->length(), 1);
7131 Load(args->at(0));
7132 TranscendentalCacheStub stub(TranscendentalCache::COS);
7133 Result result = frame_->CallStub(&stub, 1);
7134 frame_->Push(&result);
7135}
7136
7137
Steve Block6ded16b2010-05-10 14:33:55 +01007138// Generates the Math.sqrt method. Please note - this function assumes that
7139// the callsite has executed ToNumber on the argument.
7140void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
7141 ASSERT_EQ(args->length(), 1);
7142 Load(args->at(0));
7143
7144 if (!CpuFeatures::IsSupported(SSE2)) {
7145 Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
7146 frame()->Push(&result);
7147 } else {
7148 CpuFeatures::Scope use_sse2(SSE2);
7149 // Leave original value on the frame if we need to call runtime.
7150 frame()->Dup();
7151 Result result = frame()->Pop();
7152 result.ToRegister();
7153 frame()->Spill(result.reg());
7154 Label runtime;
7155 Label non_smi;
7156 Label load_done;
7157 JumpTarget end;
7158
7159 __ test(result.reg(), Immediate(kSmiTagMask));
7160 __ j(not_zero, &non_smi);
7161 __ SmiUntag(result.reg());
7162 __ cvtsi2sd(xmm0, Operand(result.reg()));
7163 __ jmp(&load_done);
7164 __ bind(&non_smi);
7165 __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
7166 Factory::heap_number_map());
7167 __ j(not_equal, &runtime);
7168 __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));
7169
7170 __ bind(&load_done);
7171 __ sqrtsd(xmm0, xmm0);
7172 // Make a copy of the virtual frame so we can go to the runtime after
7173 // the JumpTarget jump.
7174 Result scratch = allocator()->Allocate();
7175 VirtualFrame* clone = new VirtualFrame(frame());
7176 __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);
7177
7178 __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
7179 frame()->Drop(1);
7180 scratch.Unuse();
7181 end.Jump(&result);
7182 // We only branch to runtime if we have an allocation error.
7183 // Use the copy of the original frame as our current frame.
7184 RegisterFile empty_regs;
7185 SetFrame(clone, &empty_regs);
7186 __ bind(&runtime);
7187 result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
7188
7189 end.Bind(&result);
7190 frame()->Push(&result);
7191 }
7192}
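
// Editor's sketch of the unboxing fast path above, assuming ia32 smi
// tagging (a smi stores x as x << 1; heap numbers carry the double at
// HeapNumber::kValueOffset):
//
//   double UnboxNumber(intptr_t v) {
//     if ((v & kSmiTagMask) == 0) return static_cast<double>(v >> 1);
//     return value_at(v - kHeapObjectTag + HeapNumber::kValueOffset);
//   }
//
// where value_at is a hypothetical stand-in for the movdbl load emitted
// above.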
7193
7194
Steve Blocka7e24c12009-10-30 11:49:00 +00007195void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01007196 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00007197 if (CheckForInlineRuntimeCall(node)) {
7198 return;
7199 }
7200
7201 ZoneList<Expression*>* args = node->arguments();
7202 Comment cmnt(masm_, "[ CallRuntime");
7203 Runtime::Function* function = node->function();
7204
7205 if (function == NULL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007206 // Push the builtins object found in the current global object.
7207 Result temp = allocator()->Allocate();
7208 ASSERT(temp.is_valid());
7209 __ mov(temp.reg(), GlobalObject());
7210 __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
7211 frame_->Push(&temp);
7212 }
7213
7214 // Push the arguments ("left-to-right").
7215 int arg_count = args->length();
7216 for (int i = 0; i < arg_count; i++) {
7217 Load(args->at(i));
7218 }
7219
7220 if (function == NULL) {
7221 // Call the JS runtime function.
Leon Clarkee46be812010-01-19 14:06:41 +00007222 frame_->Push(node->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00007223 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
7224 arg_count,
7225 loop_nesting_);
7226 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00007227 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00007228 } else {
7229 // Call the C runtime function.
7230 Result answer = frame_->CallRuntime(function, arg_count);
7231 frame_->Push(&answer);
7232 }
7233}
7234
7235
7236void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007237 Comment cmnt(masm_, "[ UnaryOperation");
7238
7239 Token::Value op = node->op();
7240
7241 if (op == Token::NOT) {
7242 // Swap the true and false targets but keep the same actual label
7243 // as the fall through.
7244 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00007245 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00007246 // Swap the labels back.
7247 destination()->Invert();
7248
7249 } else if (op == Token::DELETE) {
7250 Property* property = node->expression()->AsProperty();
7251 if (property != NULL) {
7252 Load(property->obj());
7253 Load(property->key());
7254 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
7255 frame_->Push(&answer);
7256 return;
7257 }
7258
7259 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
7260 if (variable != NULL) {
7261 Slot* slot = variable->slot();
7262 if (variable->is_global()) {
7263 LoadGlobal();
7264 frame_->Push(variable->name());
7265 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
7266 CALL_FUNCTION, 2);
7267 frame_->Push(&answer);
7268 return;
7269
7270 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
7271 // Call the runtime to look up the context holding the named
7272 // variable. Sync the virtual frame eagerly so we can push the
7273 // arguments directly into place.
7274 frame_->SyncRange(0, frame_->element_count() - 1);
7275 frame_->EmitPush(esi);
7276 frame_->EmitPush(Immediate(variable->name()));
7277 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
7278 ASSERT(context.is_register());
7279 frame_->EmitPush(context.reg());
7280 context.Unuse();
7281 frame_->EmitPush(Immediate(variable->name()));
7282 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
7283 CALL_FUNCTION, 2);
7284 frame_->Push(&answer);
7285 return;
7286 }
7287
7288 // Default: Result of deleting non-global, not dynamically
7289 // introduced variables is false.
7290 frame_->Push(Factory::false_value());
7291
7292 } else {
7293 // Default: Result of deleting expressions is true.
7294 Load(node->expression()); // may have side-effects
7295 frame_->SetElementAt(0, Factory::true_value());
7296 }
7297
7298 } else if (op == Token::TYPEOF) {
7299 // Special case for loading the typeof expression; see comment on
7300 // LoadTypeofExpression().
7301 LoadTypeofExpression(node->expression());
7302 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
7303 frame_->Push(&answer);
7304
7305 } else if (op == Token::VOID) {
7306 Expression* expression = node->expression();
7307 if (expression && expression->AsLiteral() && (
7308 expression->AsLiteral()->IsTrue() ||
7309 expression->AsLiteral()->IsFalse() ||
7310 expression->AsLiteral()->handle()->IsNumber() ||
7311 expression->AsLiteral()->handle()->IsString() ||
7312 expression->AsLiteral()->handle()->IsJSRegExp() ||
7313 expression->AsLiteral()->IsNull())) {
7314 // Omit evaluating the value of the primitive literal.
7315 // It will be discarded anyway, and can have no side effect.
7316 frame_->Push(Factory::undefined_value());
7317 } else {
7318 Load(node->expression());
7319 frame_->SetElementAt(0, Factory::undefined_value());
7320 }
7321
7322 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01007323 if (in_safe_int32_mode()) {
7324 Visit(node->expression());
7325 Result value = frame_->Pop();
7326 ASSERT(value.is_untagged_int32());
7327 // Registers containing an int32 value are not multiply used.
7328 ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
7329 value.ToRegister();
7330 switch (op) {
7331 case Token::SUB: {
7332 __ neg(value.reg());
7333 if (node->no_negative_zero()) {
7334 // -MIN_INT is MIN_INT with the overflow flag set.
7335 unsafe_bailout_->Branch(overflow);
7336 } else {
7337 // MIN_INT and 0 both have bad negations. Both have all zeros in the low 31 bits.
7338 __ test(value.reg(), Immediate(0x7FFFFFFF));
7339 unsafe_bailout_->Branch(zero);
7340 }
7341 break;
7342 }
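 // Worked example: neg on 0x80000000 (MIN_INT) overflows and leaves the
 // value unchanged, while neg on 0 yields -0, which an int32 cannot
 // represent; the 0x7FFFFFFF test above catches exactly these two inputs.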
7343 case Token::BIT_NOT: {
7344 __ not_(value.reg());
7345 break;
7346 }
7347 case Token::ADD: {
7348 // Unary plus has no effect on int32 values.
7349 break;
7350 }
7351 default:
7352 UNREACHABLE();
7353 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00007354 }
Steve Block6ded16b2010-05-10 14:33:55 +01007355 frame_->Push(&value);
7356 } else {
7357 Load(node->expression());
7358 bool overwrite =
7359 (node->expression()->AsBinaryOperation() != NULL &&
7360 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
7361 switch (op) {
7362 case Token::NOT:
7363 case Token::DELETE:
7364 case Token::TYPEOF:
7365 UNREACHABLE(); // handled above
7366 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00007367
Steve Block6ded16b2010-05-10 14:33:55 +01007368 case Token::SUB: {
7369 GenericUnaryOpStub stub(Token::SUB, overwrite);
7370 Result operand = frame_->Pop();
7371 Result answer = frame_->CallStub(&stub, &operand);
7372 answer.set_type_info(TypeInfo::Number());
7373 frame_->Push(&answer);
7374 break;
7375 }
7376 case Token::BIT_NOT: {
7377 // Smi check.
7378 JumpTarget smi_label;
7379 JumpTarget continue_label;
7380 Result operand = frame_->Pop();
7381 TypeInfo operand_info = operand.type_info();
7382 operand.ToRegister();
7383 if (operand_info.IsSmi()) {
7384 if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
7385 frame_->Spill(operand.reg());
7386 // Set smi tag bit. It will be reset by the not operation.
7387 __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
7388 __ not_(operand.reg());
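 // Worked example of the tag trick: a smi encodes x as 2 * x. Setting the
 // tag bit gives 2 * x + 1, and in two's complement
 //   ~(2 * x + 1) = -2 * x - 2 = 2 * (-x - 1) = 2 * (~x),
 // which is ~x correctly re-encoded as a smi.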
7389 Result answer = operand;
7390 answer.set_type_info(TypeInfo::Smi());
7391 frame_->Push(&answer);
7392 } else {
7393 __ test(operand.reg(), Immediate(kSmiTagMask));
7394 smi_label.Branch(zero, &operand, taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00007395
Steve Block6ded16b2010-05-10 14:33:55 +01007396 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
7397 Result answer = frame_->CallStub(&stub, &operand);
7398 continue_label.Jump(&answer);
Leon Clarkee46be812010-01-19 14:06:41 +00007399
Steve Block6ded16b2010-05-10 14:33:55 +01007400 smi_label.Bind(&answer);
7401 answer.ToRegister();
7402 frame_->Spill(answer.reg());
7403 // Set smi tag bit. It will be reset by the not operation.
7404 __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
7405 __ not_(answer.reg());
Leon Clarkee46be812010-01-19 14:06:41 +00007406
Steve Block6ded16b2010-05-10 14:33:55 +01007407 continue_label.Bind(&answer);
7408 answer.set_type_info(TypeInfo::Integer32());
7409 frame_->Push(&answer);
7410 }
7411 break;
7412 }
7413 case Token::ADD: {
7414 // Smi check.
7415 JumpTarget continue_label;
7416 Result operand = frame_->Pop();
7417 TypeInfo operand_info = operand.type_info();
7418 operand.ToRegister();
7419 __ test(operand.reg(), Immediate(kSmiTagMask));
7420 continue_label.Branch(zero, &operand, taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00007421
Steve Block6ded16b2010-05-10 14:33:55 +01007422 frame_->Push(&operand);
7423 Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
Steve Blocka7e24c12009-10-30 11:49:00 +00007424 CALL_FUNCTION, 1);
7425
Steve Block6ded16b2010-05-10 14:33:55 +01007426 continue_label.Bind(&answer);
7427 if (operand_info.IsSmi()) {
7428 answer.set_type_info(TypeInfo::Smi());
7429 } else if (operand_info.IsInteger32()) {
7430 answer.set_type_info(TypeInfo::Integer32());
7431 } else {
7432 answer.set_type_info(TypeInfo::Number());
7433 }
7434 frame_->Push(&answer);
7435 break;
7436 }
7437 default:
7438 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00007439 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007440 }
7441 }
7442}
7443
7444
7445// The value in dst was optimistically incremented or decremented. The
7446// result overflowed or was not smi tagged. Undo the operation, call
7447// into the runtime to convert the argument to a number, and call the
7448// specialized add or subtract stub. The result is left in dst.
7449class DeferredPrefixCountOperation: public DeferredCode {
7450 public:
Steve Block6ded16b2010-05-10 14:33:55 +01007451 DeferredPrefixCountOperation(Register dst,
7452 bool is_increment,
7453 TypeInfo input_type)
7454 : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007455 set_comment("[ DeferredCountOperation");
7456 }
7457
7458 virtual void Generate();
7459
7460 private:
7461 Register dst_;
7462 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01007463 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00007464};
7465
7466
7467void DeferredPrefixCountOperation::Generate() {
7468 // Undo the optimistic smi operation.
7469 if (is_increment_) {
7470 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
7471 } else {
7472 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
7473 }
Steve Block6ded16b2010-05-10 14:33:55 +01007474 Register left;
7475 if (input_type_.IsNumber()) {
7476 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00007477 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01007478 __ push(dst_);
7479 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
7480 left = eax;
Steve Blocka7e24c12009-10-30 11:49:00 +00007481 }
Steve Block6ded16b2010-05-10 14:33:55 +01007482
7483 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
7484 NO_OVERWRITE,
7485 NO_GENERIC_BINARY_FLAGS,
7486 TypeInfo::Number());
7487 stub.GenerateCall(masm_, left, Smi::FromInt(1));
7488
Steve Blocka7e24c12009-10-30 11:49:00 +00007489 if (!dst_.is(eax)) __ mov(dst_, eax);
7490}
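
// Worked example: the optimistic operation added or subtracted the raw
// word 2 (the smi encoding of 1), so the undo above restores the original
// bits even when dst_ holds a heap object pointer rather than a smi.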
7491
7492
7493// The value in dst was optimistically incremented or decremented. The
7494// result overflowed or was not smi tagged. Undo the operation and call
7495// into the runtime to convert the argument to a number. Update the
7496// original value in old. Call the specialized add or subtract stub.
7497// The result is left in dst.
7498class DeferredPostfixCountOperation: public DeferredCode {
7499 public:
Steve Block6ded16b2010-05-10 14:33:55 +01007500 DeferredPostfixCountOperation(Register dst,
7501 Register old,
7502 bool is_increment,
7503 TypeInfo input_type)
7504 : dst_(dst),
7505 old_(old),
7506 is_increment_(is_increment),
7507 input_type_(input_type) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007508 set_comment("[ DeferredCountOperation");
7509 }
7510
7511 virtual void Generate();
7512
7513 private:
7514 Register dst_;
7515 Register old_;
7516 bool is_increment_;
Steve Block6ded16b2010-05-10 14:33:55 +01007517 TypeInfo input_type_;
Steve Blocka7e24c12009-10-30 11:49:00 +00007518};
7519
7520
7521void DeferredPostfixCountOperation::Generate() {
7522 // Undo the optimistic smi operation.
7523 if (is_increment_) {
7524 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
7525 } else {
7526 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
7527 }
Steve Block6ded16b2010-05-10 14:33:55 +01007528 Register left;
7529 if (input_type_.IsNumber()) {
7530 __ push(dst_); // Save the input to use as the old value.
7531 left = dst_;
Steve Blocka7e24c12009-10-30 11:49:00 +00007532 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01007533 __ push(dst_);
7534 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
7535 __ push(eax); // Save the result of ToNumber to use as the old value.
7536 left = eax;
Steve Blocka7e24c12009-10-30 11:49:00 +00007537 }
Steve Block6ded16b2010-05-10 14:33:55 +01007538
7539 GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
7540 NO_OVERWRITE,
7541 NO_GENERIC_BINARY_FLAGS,
7542 TypeInfo::Number());
7543 stub.GenerateCall(masm_, left, Smi::FromInt(1));
7544
Steve Blocka7e24c12009-10-30 11:49:00 +00007545 if (!dst_.is(eax)) __ mov(dst_, eax);
7546 __ pop(old_);
7547}
7548
7549
7550void CodeGenerator::VisitCountOperation(CountOperation* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01007551 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00007552 Comment cmnt(masm_, "[ CountOperation");
7553
7554 bool is_postfix = node->is_postfix();
7555 bool is_increment = node->op() == Token::INC;
7556
7557 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
7558 bool is_const = (var != NULL && var->mode() == Variable::CONST);
7559
7560 // Postfix operations need a stack slot under the reference to hold
7561 // the old value while the new value is being stored. This is so that
7562 // in the case that storing the new value requires a call, the old
7563 // value will be in the frame to be spilled.
7564 if (is_postfix) frame_->Push(Smi::FromInt(0));
7565
Leon Clarked91b9f72010-01-27 17:25:45 +00007566 // A constant reference is never stored to, so it is not treated as a
7567 // compound assignment reference.
7568 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00007569 if (target.is_illegal()) {
7570 // Spoof the virtual frame to have the expected height (one higher
7571 // than on entry).
7572 if (!is_postfix) frame_->Push(Smi::FromInt(0));
7573 return;
7574 }
Steve Blockd0582a62009-12-15 09:54:21 +00007575 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00007576
7577 Result new_value = frame_->Pop();
7578 new_value.ToRegister();
7579
7580 Result old_value; // Only allocated in the postfix case.
7581 if (is_postfix) {
7582 // Allocate a temporary to preserve the old value.
7583 old_value = allocator_->Allocate();
7584 ASSERT(old_value.is_valid());
7585 __ mov(old_value.reg(), new_value.reg());
Steve Block6ded16b2010-05-10 14:33:55 +01007586
7587 // The return value for postfix operations is ToNumber(input).
7588 // Keep more precise type info if the input is some kind of
7589 // number already. If the input is not a number we have to wait
7590 // for the deferred code to convert it.
7591 if (new_value.type_info().IsNumber()) {
7592 old_value.set_type_info(new_value.type_info());
7593 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007594 }
Steve Block6ded16b2010-05-10 14:33:55 +01007595
Steve Blocka7e24c12009-10-30 11:49:00 +00007596 // Ensure the new value is writable.
7597 frame_->Spill(new_value.reg());
7598
Steve Block6ded16b2010-05-10 14:33:55 +01007599 Result tmp;
7600 if (new_value.is_smi()) {
7601 if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00007602 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01007603 // We don't know statically if the input is a smi.
7604 // In order to combine the overflow and the smi tag check, we need
7605 // to be able to allocate a byte register. We attempt to do so
7606 // without spilling. If we fail, we will generate separate overflow
7607 // and smi tag checks.
7608 // We allocate and clear the temporary byte register before performing
7609 // the count operation, because clearing it afterwards with xor would
7610 // destroy the overflow flag we need to test.
7611 tmp = allocator_->AllocateByteRegisterWithoutSpilling();
7612 if (tmp.is_valid()) {
7613 __ Set(tmp.reg(), Immediate(0));
7614 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007615 }
7616
7617 if (is_increment) {
7618 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
7619 } else {
7620 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
7621 }
7622
Steve Block6ded16b2010-05-10 14:33:55 +01007623 DeferredCode* deferred = NULL;
7624 if (is_postfix) {
7625 deferred = new DeferredPostfixCountOperation(new_value.reg(),
7626 old_value.reg(),
7627 is_increment,
7628 new_value.type_info());
7629 } else {
7630 deferred = new DeferredPrefixCountOperation(new_value.reg(),
7631 is_increment,
7632 new_value.type_info());
7633 }
7634
7635 if (new_value.is_smi()) {
7636 // In case we have a smi as input just check for overflow.
7637 deferred->Branch(overflow);
7638 } else {
7639 // If the count operation didn't overflow and the result is a valid
7640 // smi, we're done. Otherwise, we jump to the deferred slow-case
7641 // code.
Steve Blocka7e24c12009-10-30 11:49:00 +00007642 // We combine the overflow and the smi tag check if we could
7643 // successfully allocate a temporary byte register.
Steve Block6ded16b2010-05-10 14:33:55 +01007644 if (tmp.is_valid()) {
7645 __ setcc(overflow, tmp.reg());
7646 __ or_(Operand(tmp.reg()), new_value.reg());
7647 __ test(tmp.reg(), Immediate(kSmiTagMask));
7648 tmp.Unuse();
7649 deferred->Branch(not_zero);
7650 } else {
7651 // Otherwise we test separately for overflow and smi tag.
7652 deferred->Branch(overflow);
7653 __ test(new_value.reg(), Immediate(kSmiTagMask));
7654 deferred->Branch(not_zero);
7655 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007656 }
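 // Worked reasoning for the combined check above: setcc writes 1 into the
 // byte register exactly when the add/sub overflowed, and a non-smi result
 // has its tag bit (bit 0) set. After the or, bit 0 of tmp is set iff the
 // operation overflowed or the result is not a smi, so a single
 // test(tmp, kSmiTagMask) covers both deferred conditions.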
7657 deferred->BindExit();
7658
Steve Block6ded16b2010-05-10 14:33:55 +01007659 // Postfix count operations return their input converted to
7660 // number. The case when the input is already a number is covered
7661 // above in the allocation code for old_value.
7662 if (is_postfix && !new_value.type_info().IsNumber()) {
7663 old_value.set_type_info(TypeInfo::Number());
7664 }
7665
7666 // The result of ++ or -- is an Integer32 if the
7667 // input is a smi. Otherwise it is a number.
7668 if (new_value.is_smi()) {
7669 new_value.set_type_info(TypeInfo::Integer32());
7670 } else {
7671 new_value.set_type_info(TypeInfo::Number());
7672 }
7673
Steve Blocka7e24c12009-10-30 11:49:00 +00007674 // Postfix: store the old value in the allocated slot under the
7675 // reference.
7676 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
7677
7678 frame_->Push(&new_value);
7679 // Non-constant: update the reference.
7680 if (!is_const) target.SetValue(NOT_CONST_INIT);
7681 }
7682
7683 // Postfix: drop the new value and use the old.
7684 if (is_postfix) frame_->Drop();
7685}
7686
7687
Steve Block6ded16b2010-05-10 14:33:55 +01007688void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007689 Token::Value op = node->op();
Steve Block6ded16b2010-05-10 14:33:55 +01007690 Comment cmnt(masm_, "[ Int32BinaryOperation");
7691 ASSERT(in_safe_int32_mode());
7692 ASSERT(safe_int32_mode_enabled());
7693 ASSERT(FLAG_safe_int32_compiler);
Steve Blocka7e24c12009-10-30 11:49:00 +00007694
Steve Block6ded16b2010-05-10 14:33:55 +01007695 if (op == Token::COMMA) {
7696 // Discard left value.
7697 frame_->Nip(1);
7698 return;
7699 }
7700
7701 Result right = frame_->Pop();
7702 Result left = frame_->Pop();
7703
7704 ASSERT(right.is_untagged_int32());
7705 ASSERT(left.is_untagged_int32());
7706 // Registers containing an int32 value are not multiply used.
7707 ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
7708 ASSERT(!right.is_register() || !frame_->is_used(right.reg()));
7709
7710 switch (op) {
7711 case Token::COMMA:
7712 case Token::OR:
7713 case Token::AND:
7714 UNREACHABLE();
7715 break;
7716 case Token::BIT_OR:
7717 case Token::BIT_XOR:
7718 case Token::BIT_AND:
7719 if (left.is_constant() || right.is_constant()) {
7720 int32_t value; // Put constant in value, non-constant in left.
7721 // Constants are known to be int32 values, from static analysis,
7722 // or else will be converted to int32 by implicit ECMA [[ToInt32]].
7723 if (left.is_constant()) {
7724 ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
7725 value = NumberToInt32(*left.handle());
7726 left = right;
7727 } else {
7728 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
7729 value = NumberToInt32(*right.handle());
7730 }
7731
7732 left.ToRegister();
7733 if (op == Token::BIT_OR) {
7734 __ or_(Operand(left.reg()), Immediate(value));
7735 } else if (op == Token::BIT_XOR) {
7736 __ xor_(Operand(left.reg()), Immediate(value));
7737 } else {
7738 ASSERT(op == Token::BIT_AND);
7739 __ and_(Operand(left.reg()), Immediate(value));
7740 }
7741 } else {
7742 ASSERT(left.is_register());
7743 ASSERT(right.is_register());
7744 if (op == Token::BIT_OR) {
7745 __ or_(left.reg(), Operand(right.reg()));
7746 } else if (op == Token::BIT_XOR) {
7747 __ xor_(left.reg(), Operand(right.reg()));
7748 } else {
7749 ASSERT(op == Token::BIT_AND);
7750 __ and_(left.reg(), Operand(right.reg()));
7751 }
7752 }
7753 frame_->Push(&left);
7754 right.Unuse();
7755 break;
7756 case Token::SAR:
7757 case Token::SHL:
7758 case Token::SHR: {
7759 bool test_shr_overflow = false;
7760 left.ToRegister();
7761 if (right.is_constant()) {
7762 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
7763 int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
7764 if (op == Token::SAR) {
7765 __ sar(left.reg(), shift_amount);
7766 } else if (op == Token::SHL) {
7767 __ shl(left.reg(), shift_amount);
7768 } else {
7769 ASSERT(op == Token::SHR);
7770 __ shr(left.reg(), shift_amount);
7771 if (shift_amount == 0) test_shr_overflow = true;
7772 }
7773 } else {
7774 // Move the right operand into ecx.
7775 if (left.is_register() && left.reg().is(ecx)) {
7776 right.ToRegister();
7777 __ xchg(left.reg(), right.reg());
7778 left = right; // Left is unused here, copy of right unused by Push.
7779 } else {
7780 right.ToRegister(ecx);
7781 left.ToRegister();
7782 }
7783 if (op == Token::SAR) {
7784 __ sar_cl(left.reg());
7785 } else if (op == Token::SHL) {
7786 __ shl_cl(left.reg());
7787 } else {
7788 ASSERT(op == Token::SHR);
7789 __ shr_cl(left.reg());
7790 test_shr_overflow = true;
7791 }
7792 }
7793 {
7794 Register left_reg = left.reg();
7795 frame_->Push(&left);
7796 right.Unuse();
7797 if (test_shr_overflow && !node->to_int32()) {
7798 // Uint32 results with top bit set are not Int32 values.
7799 // If they will be forced to Int32, skip the test.
7800 // Test is needed because shr with shift amount 0 does not set flags.
7801 __ test(left_reg, Operand(left_reg));
7802 unsafe_bailout_->Branch(sign);
7803 }
7804 }
7805 break;
7806 }
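 // Example of the condition being guarded: in JavaScript, -1 >>> 0
 // evaluates to 4294967295, which does not fit in an int32, so a shr
 // result with the sign bit set must bail out of safe int32 mode unless
 // it is immediately forced back through ToInt32.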
7807 case Token::ADD:
7808 case Token::SUB:
7809 case Token::MUL:
7810 if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
7811 int32_t value; // Put constant in value, non-constant in left.
7812 if (right.is_constant()) {
7813 ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
7814 value = NumberToInt32(*right.handle());
7815 } else {
7816 ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
7817 value = NumberToInt32(*left.handle());
7818 left = right;
7819 }
7820
7821 left.ToRegister();
7822 if (op == Token::ADD) {
7823 __ add(Operand(left.reg()), Immediate(value));
7824 } else if (op == Token::SUB) {
7825 __ sub(Operand(left.reg()), Immediate(value));
7826 } else {
7827 ASSERT(op == Token::MUL);
7828 __ imul(left.reg(), left.reg(), value);
7829 }
7830 } else {
7831 left.ToRegister();
7832 ASSERT(left.is_register());
7833 ASSERT(right.is_register());
7834 if (op == Token::ADD) {
7835 __ add(left.reg(), Operand(right.reg()));
7836 } else if (op == Token::SUB) {
7837 __ sub(left.reg(), Operand(right.reg()));
7838 } else {
7839 ASSERT(op == Token::MUL);
7840 // We have statically verified that a negative zero can be ignored.
7841 __ imul(left.reg(), Operand(right.reg()));
7842 }
7843 }
7844 right.Unuse();
7845 frame_->Push(&left);
7846 if (!node->to_int32()) {
7847 // If ToInt32 is applied to the result of the ADD, SUB, or MUL we do
7848 // not care about overflow; otherwise an overflow forces this bailout.
7849 unsafe_bailout_->Branch(overflow);
7850 }
7851 break;
7852 case Token::DIV:
7853 case Token::MOD: {
7854 if (right.is_register() && (right.reg().is(eax) || right.reg().is(edx))) {
7855 if (left.is_register() && left.reg().is(edi)) {
7856 right.ToRegister(ebx);
7857 } else {
7858 right.ToRegister(edi);
7859 }
7860 }
7861 left.ToRegister(eax);
7862 Result edx_reg = allocator_->Allocate(edx);
7863 right.ToRegister();
7864 // The results are unused here because BreakTarget::Branch cannot handle
7865 // live results.
7866 Register right_reg = right.reg();
7867 left.Unuse();
7868 right.Unuse();
7869 edx_reg.Unuse();
7870 __ cmp(right_reg, 0);
7871 // Ensure divisor is positive: no chance of non-int32 or -0 result.
7872 unsafe_bailout_->Branch(less_equal);
7873 __ cdq(); // Sign-extend eax into edx:eax
7874 __ idiv(right_reg);
7875 if (op == Token::MOD) {
7876 // Negative zero can arise as a negative dividend with a zero result.
7877 if (!node->no_negative_zero()) {
7878 Label not_negative_zero;
7879 __ test(edx, Operand(edx));
7880 __ j(not_zero, &not_negative_zero);
7881 __ test(eax, Operand(eax));
7882 unsafe_bailout_->Branch(negative);
7883 __ bind(&not_negative_zero);
7884 }
7885 Result edx_result(edx, TypeInfo::Integer32());
7886 edx_result.set_untagged_int32(true);
7887 frame_->Push(&edx_result);
7888 } else {
7889 ASSERT(op == Token::DIV);
7890 __ test(edx, Operand(edx));
7891 unsafe_bailout_->Branch(not_equal);
7892 Result eax_result(eax, TypeInfo::Integer32());
7893 eax_result.set_untagged_int32(true);
7894 frame_->Push(&eax_result);
7895 }
7896 break;
7897 }
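 // Examples of the bailouts above: in JavaScript, -5 / 2 is -2.5 (a
 // non-int32 quotient, caught by the nonzero-remainder test), and
 // -4 % 4 is -0 (a negative dividend with a zero remainder).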
7898 default:
7899 UNREACHABLE();
7900 break;
7901 }
7902}
7903
7904
7905void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007906 // According to ECMA-262 section 11.11, page 58, the binary logical
7907 // operators must yield the result of one of the two expressions
7908 // before any ToBoolean() conversions. This means that the value
7909 // produced by a && or || operator is not necessarily a boolean.
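 // For example, 0 && f() evaluates to 0 without calling f, and
 // "" || "fallback" evaluates to "fallback"; neither result is a boolean.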
7910
7911 // NOTE: If the left hand side produces a materialized value (not
7912 // control flow), we force the right hand side to do the same. This
7913 // is necessary because we assume that if we get control flow on the
7914 // last path out of an expression we got it on all paths.
Steve Block6ded16b2010-05-10 14:33:55 +01007915 if (node->op() == Token::AND) {
7916 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00007917 JumpTarget is_true;
7918 ControlDestination dest(&is_true, destination()->false_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00007919 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00007920
7921 if (dest.false_was_fall_through()) {
7922 // The current false target was used as the fall-through. If
7923 // there are no dangling jumps to is_true then the left
7924 // subexpression was unconditionally false. Otherwise we have
7925 // paths where we do have to evaluate the right subexpression.
7926 if (is_true.is_linked()) {
7927 // We need to compile the right subexpression. If the jump to
7928 // the current false target was a forward jump then we have a
7929 // valid frame, we have just bound the false target, and we
7930 // have to jump around the code for the right subexpression.
7931 if (has_valid_frame()) {
7932 destination()->false_target()->Unuse();
7933 destination()->false_target()->Jump();
7934 }
7935 is_true.Bind();
7936 // The left subexpression compiled to control flow, so the
7937 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00007938 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00007939 } else {
7940 // We have actually just jumped to or bound the current false
7941 // target but the current control destination is not marked as
7942 // used.
7943 destination()->Use(false);
7944 }
7945
7946 } else if (dest.is_used()) {
7947 // The left subexpression compiled to control flow (and is_true
7948 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00007949 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00007950
7951 } else {
7952 // We have a materialized value on the frame, so we exit with
7953 // one on all paths. There are possibly also jumps to is_true
7954 // from nested subexpressions.
7955 JumpTarget pop_and_continue;
7956 JumpTarget exit;
7957
7958 // Avoid popping the result if it converts to 'false' using the
7959 // standard ToBoolean() conversion as described in ECMA-262,
7960 // section 9.2, page 30.
7961 //
7962 // Duplicate the TOS value. The duplicate will be popped by
7963 // ToBoolean.
7964 frame_->Dup();
7965 ControlDestination dest(&pop_and_continue, &exit, true);
7966 ToBoolean(&dest);
7967
7968 // Pop the result of evaluating the first part.
7969 frame_->Drop();
7970
7971 // Compile right side expression.
7972 is_true.Bind();
7973 Load(node->right());
7974
7975 // Exit (always with a materialized value).
7976 exit.Bind();
7977 }
7978
Steve Block6ded16b2010-05-10 14:33:55 +01007979 } else {
7980 ASSERT(node->op() == Token::OR);
7981 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00007982 JumpTarget is_false;
7983 ControlDestination dest(destination()->true_target(), &is_false, false);
Steve Blockd0582a62009-12-15 09:54:21 +00007984 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00007985
7986 if (dest.true_was_fall_through()) {
7987 // The current true target was used as the fall-through. If
7988 // there are no dangling jumps to is_false then the left
7989 // subexpression was unconditionally true. Otherwise we have
7990 // paths where we do have to evaluate the right subexpression.
7991 if (is_false.is_linked()) {
7992 // We need to compile the right subexpression. If the jump to
7993 // the current true target was a forward jump then we have a
7994 // valid frame, we have just bound the true target, and we
7995 // have to jump around the code for the right subexpression.
7996 if (has_valid_frame()) {
7997 destination()->true_target()->Unuse();
7998 destination()->true_target()->Jump();
7999 }
8000 is_false.Bind();
8001 // The left subexpression compiled to control flow, so the
8002 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008003 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008004 } else {
8005 // We have just jumped to or bound the current true target but
8006 // the current control destination is not marked as used.
8007 destination()->Use(true);
8008 }
8009
8010 } else if (dest.is_used()) {
8011 // The left subexpression compiled to control flow (and is_false
8012 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00008013 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008014
8015 } else {
8016 // We have a materialized value on the frame, so we exit with
8017 // one on all paths. There are possibly also jumps to is_false
8018 // from nested subexpressions.
8019 JumpTarget pop_and_continue;
8020 JumpTarget exit;
8021
8022 // Avoid popping the result if it converts to 'true' using the
8023 // standard ToBoolean() conversion as described in ECMA-262,
8024 // section 9.2, page 30.
8025 //
8026 // Duplicate the TOS value. The duplicate will be popped by
8027 // ToBoolean.
8028 frame_->Dup();
8029 ControlDestination dest(&exit, &pop_and_continue, false);
8030 ToBoolean(&dest);
8031
8032 // Pop the result of evaluating the first part.
8033 frame_->Drop();
8034
8035 // Compile right side expression.
8036 is_false.Bind();
8037 Load(node->right());
8038
8039 // Exit (always with a materialized value).
8040 exit.Bind();
8041 }
Steve Block6ded16b2010-05-10 14:33:55 +01008042 }
8043}
Steve Blocka7e24c12009-10-30 11:49:00 +00008044
Steve Block6ded16b2010-05-10 14:33:55 +01008045
8046void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
8047 Comment cmnt(masm_, "[ BinaryOperation");
8048
8049 if (node->op() == Token::AND || node->op() == Token::OR) {
8050 GenerateLogicalBooleanOperation(node);
8051 } else if (in_safe_int32_mode()) {
8052 Visit(node->left());
8053 Visit(node->right());
8054 Int32BinaryOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00008055 } else {
8056 // NOTE: The code below assumes that the slow cases (calls to runtime)
8057 // never return a constant/immutable object.
8058 OverwriteMode overwrite_mode = NO_OVERWRITE;
8059 if (node->left()->AsBinaryOperation() != NULL &&
8060 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
8061 overwrite_mode = OVERWRITE_LEFT;
8062 } else if (node->right()->AsBinaryOperation() != NULL &&
8063 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
8064 overwrite_mode = OVERWRITE_RIGHT;
8065 }
8066
Steve Block6ded16b2010-05-10 14:33:55 +01008067 if (node->left()->IsTrivial()) {
8068 Load(node->right());
8069 Result right = frame_->Pop();
8070 frame_->Push(node->left());
8071 frame_->Push(&right);
8072 } else {
8073 Load(node->left());
8074 Load(node->right());
8075 }
8076 GenericBinaryOperation(node, overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00008077 }
8078}
8079
8080
8081void CodeGenerator::VisitThisFunction(ThisFunction* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008082 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008083 frame_->PushFunction();
8084}
8085
8086
8087void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01008088 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00008089 Comment cmnt(masm_, "[ CompareOperation");
8090
Leon Clarkee46be812010-01-19 14:06:41 +00008091 bool left_already_loaded = false;
8092
Steve Blocka7e24c12009-10-30 11:49:00 +00008093 // Get the expressions from the node.
8094 Expression* left = node->left();
8095 Expression* right = node->right();
8096 Token::Value op = node->op();
8097 // To make typeof testing for natives implemented in JavaScript really
8098 // efficient, we generate special code for expressions of the form:
8099 // 'typeof <expression> == <string>'.
8100 UnaryOperation* operation = left->AsUnaryOperation();
8101 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
8102 (operation != NULL && operation->op() == Token::TYPEOF) &&
8103 (right->AsLiteral() != NULL &&
8104 right->AsLiteral()->handle()->IsString())) {
8105 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
8106
8107 // Load the operand and move it to a register.
8108 LoadTypeofExpression(operation->expression());
8109 Result answer = frame_->Pop();
8110 answer.ToRegister();
8111
8112 if (check->Equals(Heap::number_symbol())) {
8113 __ test(answer.reg(), Immediate(kSmiTagMask));
8114 destination()->true_target()->Branch(zero);
8115 frame_->Spill(answer.reg());
8116 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
8117 __ cmp(answer.reg(), Factory::heap_number_map());
8118 answer.Unuse();
8119 destination()->Split(equal);
8120
8121 } else if (check->Equals(Heap::string_symbol())) {
8122 __ test(answer.reg(), Immediate(kSmiTagMask));
8123 destination()->false_target()->Branch(zero);
8124
8125 // It can be an undetectable string object.
8126 Result temp = allocator()->Allocate();
8127 ASSERT(temp.is_valid());
8128 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
8129 __ movzx_b(temp.reg(), FieldOperand(temp.reg(), Map::kBitFieldOffset));
8130 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
8131 destination()->false_target()->Branch(not_zero);
Andrei Popescu402d9372010-02-26 13:31:12 +00008132 __ CmpObjectType(answer.reg(), FIRST_NONSTRING_TYPE, temp.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00008133 temp.Unuse();
8134 answer.Unuse();
Andrei Popescu402d9372010-02-26 13:31:12 +00008135 destination()->Split(below);
Steve Blocka7e24c12009-10-30 11:49:00 +00008136
8137 } else if (check->Equals(Heap::boolean_symbol())) {
8138 __ cmp(answer.reg(), Factory::true_value());
8139 destination()->true_target()->Branch(equal);
8140 __ cmp(answer.reg(), Factory::false_value());
8141 answer.Unuse();
8142 destination()->Split(equal);
8143
8144 } else if (check->Equals(Heap::undefined_symbol())) {
8145 __ cmp(answer.reg(), Factory::undefined_value());
8146 destination()->true_target()->Branch(equal);
8147
8148 __ test(answer.reg(), Immediate(kSmiTagMask));
8149 destination()->false_target()->Branch(zero);
8150
8151 // It can be an undetectable object.
8152 frame_->Spill(answer.reg());
8153 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
8154 __ movzx_b(answer.reg(),
8155 FieldOperand(answer.reg(), Map::kBitFieldOffset));
8156 __ test(answer.reg(), Immediate(1 << Map::kIsUndetectable));
8157 answer.Unuse();
8158 destination()->Split(not_zero);
8159
8160 } else if (check->Equals(Heap::function_symbol())) {
8161 __ test(answer.reg(), Immediate(kSmiTagMask));
8162 destination()->false_target()->Branch(zero);
8163 frame_->Spill(answer.reg());
8164 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
Steve Blockd0582a62009-12-15 09:54:21 +00008165 destination()->true_target()->Branch(equal);
8166 // Regular expressions are callable so typeof == 'function'.
8167 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00008168 answer.Unuse();
8169 destination()->Split(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00008170 } else if (check->Equals(Heap::object_symbol())) {
8171 __ test(answer.reg(), Immediate(kSmiTagMask));
8172 destination()->false_target()->Branch(zero);
8173 __ cmp(answer.reg(), Factory::null_value());
8174 destination()->true_target()->Branch(equal);
8175
Steve Blocka7e24c12009-10-30 11:49:00 +00008176 Result map = allocator()->Allocate();
8177 ASSERT(map.is_valid());
Steve Blockd0582a62009-12-15 09:54:21 +00008178 // Regular expressions are typeof == 'function', not 'object'.
8179 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
8180 destination()->false_target()->Branch(equal);
8181
8182 // It can be an undetectable object.
Steve Blocka7e24c12009-10-30 11:49:00 +00008183 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset));
8184 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable));
8185 destination()->false_target()->Branch(not_zero);
8186 __ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
8187 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
8188 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
8189 destination()->false_target()->Branch(less);
8190 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
8191 answer.Unuse();
8192 map.Unuse();
8193 destination()->Split(less_equal);
8194 } else {
8195 // Uncommon case: typeof testing against a string literal that is
8196 // never returned from the typeof operator.
8197 answer.Unuse();
8198 destination()->Goto(false);
8199 }
8200 return;
Leon Clarkee46be812010-01-19 14:06:41 +00008201 } else if (op == Token::LT &&
8202 right->AsLiteral() != NULL &&
8203 right->AsLiteral()->handle()->IsHeapNumber()) {
8204 Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
8205 if (check->value() == 2147483648.0) { // 0x80000000.
8206 Load(left);
8207 left_already_loaded = true;
8208 Result lhs = frame_->Pop();
8209 lhs.ToRegister();
8210 __ test(lhs.reg(), Immediate(kSmiTagMask));
8211 destination()->true_target()->Branch(zero); // All Smis are less.
8212 Result scratch = allocator()->Allocate();
8213 ASSERT(scratch.is_valid());
8214 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
8215 __ cmp(scratch.reg(), Factory::heap_number_map());
8216 JumpTarget not_a_number;
8217 not_a_number.Branch(not_equal, &lhs);
8218 __ mov(scratch.reg(),
8219 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
8220 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
8221 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf.
8222 const uint32_t borderline_exponent =
8223 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
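 // Worked example: 2^31 is stored with a biased exponent of
 // kExponentBias + 31, so any positive double whose exponent field is
 // below this borderline is strictly less than 2^31 (i.e. less than
 // 2147483648.0).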
8224 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
8225 scratch.Unuse();
8226 lhs.Unuse();
8227 destination()->true_target()->Branch(less);
8228 destination()->false_target()->Jump();
8229
8230 not_a_number.Bind(&lhs);
8231 frame_->Push(&lhs);
8232 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008233 }
8234
8235 Condition cc = no_condition;
8236 bool strict = false;
8237 switch (op) {
8238 case Token::EQ_STRICT:
8239 strict = true;
8240 // Fall through
8241 case Token::EQ:
8242 cc = equal;
8243 break;
8244 case Token::LT:
8245 cc = less;
8246 break;
8247 case Token::GT:
8248 cc = greater;
8249 break;
8250 case Token::LTE:
8251 cc = less_equal;
8252 break;
8253 case Token::GTE:
8254 cc = greater_equal;
8255 break;
8256 case Token::IN: {
Leon Clarkee46be812010-01-19 14:06:41 +00008257 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00008258 Load(right);
8259 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
8260 frame_->Push(&answer); // push the result
8261 return;
8262 }
8263 case Token::INSTANCEOF: {
Leon Clarkee46be812010-01-19 14:06:41 +00008264 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00008265 Load(right);
8266 InstanceofStub stub;
8267 Result answer = frame_->CallStub(&stub, 2);
8268 answer.ToRegister();
8269 __ test(answer.reg(), Operand(answer.reg()));
8270 answer.Unuse();
8271 destination()->Split(zero);
8272 return;
8273 }
8274 default:
8275 UNREACHABLE();
8276 }
Steve Block6ded16b2010-05-10 14:33:55 +01008277
8278 if (left->IsTrivial()) {
8279 if (!left_already_loaded) {
8280 Load(right);
8281 Result right_result = frame_->Pop();
8282 frame_->Push(left);
8283 frame_->Push(&right_result);
8284 } else {
8285 Load(right);
8286 }
8287 } else {
8288 if (!left_already_loaded) Load(left);
8289 Load(right);
8290 }
Leon Clarkee46be812010-01-19 14:06:41 +00008291 Comparison(node, cc, strict, destination());
Steve Blocka7e24c12009-10-30 11:49:00 +00008292}
8293
8294
8295#ifdef DEBUG
8296bool CodeGenerator::HasValidEntryRegisters() {
8297 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
8298 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
8299 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
8300 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
8301 && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
8302}
8303#endif
8304
8305
8306// Emit a LoadIC call to get the value from receiver and leave it in
Andrei Popescu402d9372010-02-26 13:31:12 +00008307// dst.
Steve Blocka7e24c12009-10-30 11:49:00 +00008308class DeferredReferenceGetNamedValue: public DeferredCode {
8309 public:
8310 DeferredReferenceGetNamedValue(Register dst,
8311 Register receiver,
8312 Handle<String> name)
8313 : dst_(dst), receiver_(receiver), name_(name) {
8314 set_comment("[ DeferredReferenceGetNamedValue");
8315 }
8316
8317 virtual void Generate();
8318
8319 Label* patch_site() { return &patch_site_; }
8320
8321 private:
8322 Label patch_site_;
8323 Register dst_;
8324 Register receiver_;
8325 Handle<String> name_;
8326};
8327
8328
8329void DeferredReferenceGetNamedValue::Generate() {
Andrei Popescu402d9372010-02-26 13:31:12 +00008330 if (!receiver_.is(eax)) {
8331 __ mov(eax, receiver_);
8332 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008333 __ Set(ecx, Immediate(name_));
8334 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
8335 __ call(ic, RelocInfo::CODE_TARGET);
8336 // The call must be followed by a test eax instruction to indicate
8337 // that the inobject property case was inlined.
8338 //
8339 // Store the delta to the map check instruction here in the test
8340 // instruction. Use masm_-> instead of the __ macro since the
8341 // latter can't return a value.
8342 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
8343 // Here we use masm_-> instead of the __ macro because this is the
8344 // instruction that gets patched and coverage code gets in the way.
8345 masm_->test(eax, Immediate(-delta_to_patch_site));
8346 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
8347
8348 if (!dst_.is(eax)) __ mov(dst_, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00008349}
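
// Editor's sketch of the inlined fast path this deferred code backs up
// (an illustration, not the exact emitted sequence):
//
//   cmp(FieldOperand(receiver, HeapObject::kMapOffset), expected_map)
//   jne <deferred slow case>        // patch_site_ marks this compare
//   mov(dst, FieldOperand(receiver, inobject_offset))
//
// The test eax instruction emitted above encodes the distance back to
// that compare so the IC miss handler can locate and repatch it.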


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceGetKeyedValue(Register dst,
                                 Register receiver,
                                 Register key)
      : dst_(dst), receiver_(receiver), key_(key) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Register key_;
};


void DeferredReferenceGetKeyedValue::Generate() {
  if (!receiver_.is(eax)) {
    // Register eax is available for key.
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (!key_.is(edx)) {
    // Register edx is available for receiver.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    if (!key_.is(eax)) {
      __ mov(eax, key_);
    }
  } else {
    __ xchg(edx, eax);
  }
  // Calculate the delta from the IC call instruction to the map check
  // cmp instruction in the inlined version.  This delta is stored in
  // a test(eax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the cmp instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction.  We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value.  This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
                                 Register scratch)
      : value_(value),
        key_(key),
        receiver_(receiver),
        scratch_(scratch) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Register value_;
  Register key_;
  Register receiver_;
  Register scratch_;
  Label patch_site_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
  // Move value_ to eax, key_ to ecx, and receiver_ to edx.
  Register old_value = value_;

  // First, move value to eax.
  if (!value_.is(eax)) {
    if (key_.is(eax)) {
      // Move key_ out of eax, preferably to ecx.
      if (!value_.is(ecx) && !receiver_.is(ecx)) {
        __ mov(ecx, key_);
        key_ = ecx;
      } else {
        __ mov(scratch_, key_);
        key_ = scratch_;
      }
    }
    if (receiver_.is(eax)) {
      // Move receiver_ out of eax, preferably to edx.
      if (!value_.is(edx) && !key_.is(edx)) {
        __ mov(edx, receiver_);
        receiver_ = edx;
      } else {
        // Both moves to scratch_ have eax as their source.  No valid
        // execution path performs both moves, so scratch_ is never
        // overwritten here.
        __ mov(scratch_, receiver_);
        receiver_ = scratch_;
      }
    }
    __ mov(eax, value_);
    value_ = eax;
  }

  // Now value_ is in eax.  Move the other two to the right positions.
  // We do not update the variables key_ and receiver_ to ecx and edx.
  if (key_.is(ecx)) {
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
  } else if (key_.is(edx)) {
    if (receiver_.is(ecx)) {
      __ xchg(edx, ecx);
    } else {
      __ mov(ecx, key_);
      if (!receiver_.is(edx)) {
        __ mov(edx, receiver_);
      }
    }
  } else {  // Key is not in edx or ecx.
    if (!receiver_.is(edx)) {
      __ mov(edx, receiver_);
    }
    __ mov(ecx, key_);
  }

  // Call the IC stub.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction.  We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value.  This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  // Restore value (returned from the store IC) to its original register.
  if (!old_value.is(eax)) __ mov(old_value, eax);
}


Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Do not inline the inobject property case for loads from the global
  // object.  Also do not inline for unoptimized code.  This saves time in
  // the code generator.  Unoptimized code is toplevel code or code that is
  // not in a loop.
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    Comment cmnt(masm(), "[ Load from named Property");
    frame()->Push(name);

    RelocInfo::Mode mode = is_contextual
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    result = frame()->CallLoadIC(mode);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined.  Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the inobject property case.
    Comment cmnt(masm(), "[ Inlined named property load");
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    result = allocator()->Allocate();
    ASSERT(result.is_valid());
    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    __ bind(deferred->patch_site());
    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forward branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    deferred->Branch(not_equal);

    // The delta from the patch label to the load offset must be statically
    // known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
           LoadIC::kOffsetToLoadInstruction);
    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset.  Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));

    __ IncrementCounter(&Counters::named_load_inline, 1);
    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}
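

// Aside (illustrative, not generated code): ia32 encodes memory-operand
// displacements as either 1 or 4 bytes.  A small placeholder offset would
// be emitted in the 1-byte form and could not be rewritten in place to an
// arbitrary field offset, which is why the inlined load above starts out
// with the 4-byte-forcing kMaxInt displacement.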


Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif
  Result result = frame()->CallStoreIC(name, is_contextual);

  ASSERT_EQ(expected_height, frame()->height());
  return result;
}


Result CodeGenerator::EmitKeyedLoad() {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Inline array load code if inside of a loop.  We do not know the
  // receiver map yet, so we initially generate the code with a check
  // against an invalid map.  In the inline cache code, we patch the map
  // check if appropriate.
  if (loop_nesting() > 0) {
    Comment cmnt(masm_, "[ Inlined load from keyed Property");

    Result key = frame_->Pop();
    Result receiver = frame_->Pop();
    key.ToRegister();
    receiver.ToRegister();

    // Use a fresh temporary to load the elements without destroying
    // the receiver which is needed for the deferred slow case.
    Result elements = allocator()->Allocate();
    ASSERT(elements.is_valid());

    // Use a fresh temporary for the index and later the loaded
    // value.
    result = allocator()->Allocate();
    ASSERT(result.is_valid());

    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(result.reg(),
                                           receiver.reg(),
                                           key.reg());

    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Initially, use an invalid map.  The map is patched in the IC
    // initialization code.
    __ bind(deferred->patch_site());
    // Use masm-> here instead of the double underscore macro since extra
    // coverage code can interfere with the patching.
    masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
               Immediate(Factory::null_value()));
    deferred->Branch(not_equal);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Get the elements array from the receiver and check that it
    // is not a dictionary.
    __ mov(elements.reg(),
           FieldOperand(receiver.reg(), JSObject::kElementsOffset));
    __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    deferred->Branch(not_equal);

    // Shift the key to get the actual index value and check that
    // it is within bounds.  Use unsigned comparison to handle negative keys.
    __ mov(result.reg(), key.reg());
    __ SmiUntag(result.reg());
    __ cmp(result.reg(),
           FieldOperand(elements.reg(), FixedArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Load and check that the result is not the hole.
    __ mov(result.reg(), Operand(elements.reg(),
                                 result.reg(),
                                 times_4,
                                 FixedArray::kHeaderSize - kHeapObjectTag));
    elements.Unuse();
    __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
    deferred->Branch(equal);
    __ IncrementCounter(&Counters::keyed_load_inline, 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call.  A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load.  The explicit nop instruction is here because
    // the push that follows might be peep-hole optimized away.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}
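

// Illustrative sketch only (hypothetical helper, not used by the build):
// the inlined element load above computes the byte offset
//   index * kPointerSize + FixedArray::kHeaderSize - kHeapObjectTag
// from the untagged smi key.  Modeled with plain ints, assuming the
// 2010-era ia32 constants kPointerSize == 4, kHeapObjectTag == 1 and an
// 8-byte FixedArray header (a map word plus a length word):
static inline int ModelFixedArrayElementOffset(int untagged_index) {
  const int kAssumedHeaderSize = 8;
  const int kAssumedHeapObjectTag = 1;
  return untagged_index * 4 + kAssumedHeaderSize - kAssumedHeapObjectTag;
}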


Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    Comment cmnt(masm(), "[ Inlined store to keyed Property");

    // Get the receiver, key and value into registers.
    result = frame()->Pop();
    Result key = frame()->Pop();
    Result receiver = frame()->Pop();

    Result tmp = allocator_->Allocate();
    ASSERT(tmp.is_valid());
    Result tmp2 = allocator_->Allocate();
    ASSERT(tmp2.is_valid());

    // Determine whether the value is a constant before putting it in a
    // register.
    bool value_is_constant = result.is_constant();

    // Make sure that value, key and receiver are in registers.
    result.ToRegister();
    key.ToRegister();
    receiver.ToRegister();

    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(result.reg(),
                                           key.reg(),
                                           receiver.reg(),
                                           tmp.reg());

    // Check that the receiver is not a smi.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Check that the key is a smi.
    if (!key.is_smi()) {
      __ test(key.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
    }

    // Check that the receiver is a JSArray.
    __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
    deferred->Branch(not_equal);

    // Check that the key is within bounds.  Both the key and the length of
    // the JSArray are smis.  Use unsigned comparison to handle negative keys.
    __ cmp(key.reg(),
           FieldOperand(receiver.reg(), JSArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Get the elements array from the receiver and check that it is not a
    // dictionary.
    __ mov(tmp.reg(),
           FieldOperand(receiver.reg(), JSArray::kElementsOffset));

    // Check whether it is possible to omit the write barrier.  If the
    // elements array is in new space or the value written is a smi we can
    // safely update the elements array without updating the remembered set.
    Label in_new_space;
    __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
    if (!value_is_constant) {
      __ test(result.reg(), Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    }

    __ bind(&in_new_space);
    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison.  When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    deferred->Branch(not_equal);

    // Store the value.
    __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
    __ IncrementCounter(&Counters::keyed_store_inline, 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC();
    // Make sure that we do not have a test instruction after the
    // call.  A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 3);
  return result;
}
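

// Aside (hedged): FixedArrayElementOperand above can address the element
// with the still-tagged smi key because a smi is the integer value shifted
// left by one (kSmiTag == 0, kSmiTagSize == 1).  Scaling the tagged key by
// half a pointer size yields the same byte offset the inlined keyed load
// computed after untagging: (index << 1) * 2 == index * kPointerSize.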


#undef __
#define __ ACCESS_MASM(masm)


static void CheckTwoForSminess(MacroAssembler* masm,
                               Register left, Register right, Register scratch,
                               TypeInfo left_info, TypeInfo right_info,
                               DeferredInlineBinaryOperation* deferred) {
  if (left.is(right)) {
    if (!left_info.IsSmi()) {
      __ test(left, Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left);
    }
  } else if (!left_info.IsSmi()) {
    if (!right_info.IsSmi()) {
      __ mov(scratch, left);
      __ or_(scratch, Operand(right));
      __ test(scratch, Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      __ test(left, Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  } else {
    if (FLAG_debug_code) __ AbortIfNotSmi(left);
    if (!right_info.IsSmi()) {
      __ test(right, Immediate(kSmiTagMask));
      deferred->Branch(not_zero);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right);
    }
  }
}
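

// Illustrative sketch only (not used by the build): the or-based check in
// CheckTwoForSminess works because smis carry tag 0 in the low bit, so the
// bitwise OR of two tagged words has a clear low bit iff both are smis.
// The same predicate over plain 32-bit words:
static inline bool ModelBothAreSmis(unsigned a, unsigned b) {
  const unsigned kAssumedSmiTagMask = 1;  // kSmiTag == 0, kSmiTagSize == 1.
  return ((a | b) & kAssumedSmiTagMask) == 0;
}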


Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>::cast(raw_name->handle());
  }
}


void Reference::GetValue() {
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();

  // Record the source position for the property load.
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      Result result =
          cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      if (persist_after_get_) cgen_->frame()->Dup();
      Result result = cgen_->EmitNamedLoad(GetName(), is_global);
      if (!persist_after_get_) set_unloaded();
      cgen_->frame()->Push(&result);
      break;
    }

    case KEYED: {
      if (persist_after_get_) {
        cgen_->frame()->PushElementAt(1);
        cgen_->frame()->PushElementAt(1);
      }
      Result value = cgen_->EmitKeyedLoad();
      cgen_->frame()->Push(&value);
      if (!persist_after_get_) set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::TakeValue() {
  // For non-constant frame-allocated slots, we invalidate the value in the
  // slot.  For all others, we fall back on GetValue.
  ASSERT(!cgen_->in_spilled_code());
  ASSERT(!is_illegal());
  if (type_ != SLOT) {
    GetValue();
    return;
  }

  Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
  ASSERT(slot != NULL);
  if (slot->type() == Slot::LOOKUP ||
      slot->type() == Slot::CONTEXT ||
      slot->var()->mode() == Variable::CONST ||
      slot->is_arguments()) {
    GetValue();
    return;
  }

  // Only non-constant, frame-allocated parameters and locals can
  // reach here.  Be careful not to use the optimizations for arguments
  // object access since it may not have been initialized yet.
  ASSERT(!slot->is_arguments());
  if (slot->type() == Slot::PARAMETER) {
    cgen_->frame()->TakeParameterAt(slot->index());
  } else {
    ASSERT(slot->type() == Slot::LOCAL);
    cgen_->frame()->TakeLocalAt(slot->index());
  }

  ASSERT(persist_after_get_);
  // Do not unload the reference, because it is used in SetValue.
}


void Reference::SetValue(InitState init_state) {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  MacroAssembler* masm = cgen_->masm();
  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
      ASSERT(slot != NULL);
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      Result answer = cgen_->EmitNamedStore(GetName(), false);
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression()->AsProperty();
      ASSERT(property != NULL);

      Result answer = cgen_->EmitKeyedStore(property->key()->type());
      cgen_->frame()->Push(&answer);
      set_unloaded();
      break;
    }

    case UNLOADED:
    case ILLEGAL:
      UNREACHABLE();
  }
}


void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
  // space.  Set the context to the current context in esi.
  Label gc;
  __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function info from the stack.
  __ mov(edx, Operand(esp, 1 * kPointerSize));

  // Compute the function map in the current global context and set that
  // as the map of the allocated object.
  __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
  __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
  __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);

  // Initialize the rest of the function.  We don't have to update the
  // write barrier because the allocated object is in new space.
  __ mov(ebx, Immediate(Factory::empty_fixed_array()));
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(Factory::the_hole_value()));
  __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
  __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
  __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);

  // Return and remove the on-stack parameter.
  __ ret(1 * kPointerSize);

  // Create a new closure through the slower runtime call.
  __ bind(&gc);
  __ pop(ecx);  // Temporarily remove return address.
  __ pop(edx);
  __ push(esi);
  __ push(edx);
  __ push(ecx);  // Restore return address.
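  // At this point the stack is, from the top:
  //   [ return address | function info | context ]
  // so Runtime::kNewClosure receives the context and the function info as
  // its two arguments below the return address.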
  __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
                        eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
  __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length));

  // Set up the fixed slots.
  __ xor_(ebx, Operand(ebx));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the surrounding context.  We go through the
  // context in the function (ecx) to match the allocation behavior we have
  // in the runtime system (see Heap::AllocateFunctionContext).
  __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
  __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, Factory::undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, Operand(eax));
  __ ret(1 * kPointerSize);

  // Need to collect.  Call into the runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewContext, 1, 1);
}


void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + kPointerSize]: constant elements.
  // [esp + (2 * kPointerSize)]: literal index.
  // [esp + (3 * kPointerSize)]: literals array.

  // All sizes here are multiples of kPointerSize.
  int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
  int size = JSArray::kSize + elements_size;

  // Load boilerplate object into ecx and check if we need to create a
  // boilerplate.
  Label slow_case;
  __ mov(ecx, Operand(esp, 3 * kPointerSize));
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
  __ mov(ecx, CodeGenerator::FixedArrayElementOperand(ecx, eax));
  __ cmp(ecx, Factory::undefined_value());
  __ j(equal, &slow_case);

  // Allocate both the JS array and the elements array in one big
  // allocation.  This avoids multiple limit checks.
  __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);

  // Copy the JS array part.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(eax, i), ebx);
    }
  }

  if (length_ > 0) {
    // Get hold of the elements array of the boilerplate and set up the
    // elements pointer in the resulting object.
    __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
    __ lea(edx, Operand(eax, JSArray::kSize));
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);

    // Copy the elements array.
    for (int i = 0; i < elements_size; i += kPointerSize) {
      __ mov(ebx, FieldOperand(ecx, i));
      __ mov(FieldOperand(edx, i), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  __ bind(&slow_case);
  __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
}


// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
  Label false_result, true_result, not_string;
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // 'null' => false.
  __ cmp(eax, Factory::null_value());
  __ j(equal, &false_result);

  // Get the map and type of the heap object.
  __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));

  // Undetectable => false.
  __ movzx_b(ebx, FieldOperand(edx, Map::kBitFieldOffset));
  __ and_(ebx, 1 << Map::kIsUndetectable);
  __ j(not_zero, &false_result);

  // JavaScript object => true.
  __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
  __ j(above_equal, &true_result);

  // String value => false iff empty.
  __ cmp(ecx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &not_string);
  __ mov(edx, FieldOperand(eax, String::kLengthOffset));
  ASSERT(kSmiTag == 0);
  __ test(edx, Operand(edx));
  __ j(zero, &false_result);
  __ jmp(&true_result);

  __ bind(&not_string);
  // HeapNumber => false iff +0, -0, or NaN.
  __ cmp(edx, Factory::heap_number_map());
  __ j(not_equal, &true_result);
  __ fldz();
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ FCmp();
  __ j(zero, &false_result);
  // Fall through to |true_result|.

  // Return 1/0 for true/false in eax.
  __ bind(&true_result);
  __ mov(eax, 1);
  __ ret(1 * kPointerSize);
  __ bind(&false_result);
  __ mov(eax, 0);
  __ ret(1 * kPointerSize);
}
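

// Illustrative sketch only (hypothetical model types, not used by the
// build): the stub's decision tree restated in portable C++.  The inlined
// cases (smis, booleans, undefined) are handled by callers, matching the
// NOTE above.
struct ModelToBooleanInput {
  bool is_null;
  bool is_undetectable;
  bool is_js_object;
  bool is_string;
  int string_length;    // Valid when is_string.
  bool is_heap_number;
  double number_value;  // Valid when is_heap_number.
};

static bool ModelToBoolean(const ModelToBooleanInput& v) {
  if (v.is_null) return false;                    // 'null' => false.
  if (v.is_undetectable) return false;            // Undetectable => false.
  if (v.is_js_object) return true;                // JavaScript object => true.
  if (v.is_string) return v.string_length != 0;   // String => false iff empty.
  if (v.is_heap_number) {
    double d = v.number_value;
    return d == d && d != 0.0;                    // false iff +0, -0, or NaN.
  }
  return true;  // Everything else falls through to true.
}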


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(right);
  } else {
    // The register calling convention is: left in edx, right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (!(left.is(left_arg) && right.is(right_arg))) {
      if (left.is(right_arg) && right.is(left_arg)) {
        if (IsOperationCommutative()) {
          SetArgsReversed();
        } else {
          __ xchg(left, right);
        }
      } else if (left.is(left_arg)) {
        __ mov(right_arg, right);
      } else if (right.is(right_arg)) {
        __ mov(left_arg, left);
      } else if (left.is(right_arg)) {
        if (IsOperationCommutative()) {
          __ mov(left_arg, right);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying left argument.
          __ mov(left_arg, left);
          __ mov(right_arg, right);
        }
      } else if (right.is(left_arg)) {
        if (IsOperationCommutative()) {
          __ mov(right_arg, left);
          SetArgsReversed();
        } else {
          // Order of moves important to avoid destroying right argument.
          __ mov(right_arg, right);
          __ mov(left_arg, left);
        }
      } else {
        // Order of moves is not important.
        __ mov(left_arg, left);
        __ mov(right_arg, right);
      }
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}
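

// Aside (illustrative summary, not generated code): the case analysis in
// GenerateCall above, written as a table.  L and R are the incoming
// registers; the required convention is left in edx, right in eax.
//
//   L==edx, R==eax : nothing to do
//   L==eax, R==edx : swap, or just mark the args reversed if commutative
//   L==edx         : mov eax, R
//   R==eax         : mov edx, L
//   L==eax         : commutative ? (mov edx, R; mark reversed)
//                                : (mov edx, L; then mov eax, R)
//   R==edx         : commutative ? (mov eax, L; mark reversed)
//                                : (mov eax, R; then mov edx, L)
//   otherwise      : mov edx, L; mov eax, R  (order irrelevant)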


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Register left,
    Smi* right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(left);
    __ push(Immediate(right));
  } else {
    // The register calling convention is: left in edx, right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (left.is(left_arg)) {
      __ mov(right_arg, Immediate(right));
    } else if (left.is(right_arg) && IsOperationCommutative()) {
      __ mov(left_arg, Immediate(right));
      SetArgsReversed();
    } else {
      // For non-commutative operations, left and right_arg might be
      // the same register.  Therefore, the order of the moves is
      // important here in order to not overwrite left before moving
      // it to left_arg.
      __ mov(left_arg, left);
      __ mov(right_arg, Immediate(right));
    }

    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


void GenericBinaryOpStub::GenerateCall(
    MacroAssembler* masm,
    Smi* left,
    Register right) {
  if (!ArgsInRegistersSupported()) {
    // Pass arguments on the stack.
    __ push(Immediate(left));
    __ push(right);
  } else {
    // The register calling convention is: left in edx, right in eax.
    Register left_arg = edx;
    Register right_arg = eax;
    if (right.is(right_arg)) {
      __ mov(left_arg, Immediate(left));
    } else if (right.is(left_arg) && IsOperationCommutative()) {
      __ mov(right_arg, Immediate(left));
      SetArgsReversed();
    } else {
      // For non-commutative operations, right and left_arg might be
      // the same register.  Therefore, the order of the moves is
      // important here in order to not overwrite right before moving
      // it to right_arg.
      __ mov(right_arg, right);
      __ mov(left_arg, Immediate(left));
    }
    // Update flags to indicate that arguments are in registers.
    SetArgsInRegisters();
    __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
  }

  // Call the stub.
  __ CallStub(this);
}


Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm,
                                         VirtualFrame* frame,
                                         Result* left,
                                         Result* right) {
  if (ArgsInRegistersSupported()) {
    SetArgsInRegisters();
    return frame->CallStub(this, left, right);
  } else {
    frame->Push(left);
    frame->Push(right);
    return frame->CallStub(this, 2);
  }
}


void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
  // 1. Move arguments into edx, eax except for DIV and MOD, which need the
  // dividend in eax and edx free for the division.  Use eax, ebx for those.
  Comment load_comment(masm, "-- Load arguments");
  Register left = edx;
  Register right = eax;
  if (op_ == Token::DIV || op_ == Token::MOD) {
    left = eax;
    right = ebx;
    if (HasArgsInRegisters()) {
      __ mov(ebx, eax);
      __ mov(eax, edx);
    }
  }
  if (!HasArgsInRegisters()) {
    __ mov(right, Operand(esp, 1 * kPointerSize));
    __ mov(left, Operand(esp, 2 * kPointerSize));
  }

  if (static_operands_type_.IsSmi()) {
    if (FLAG_debug_code) {
      __ AbortIfNotSmi(left);
      __ AbortIfNotSmi(right);
    }
    if (op_ == Token::BIT_OR) {
      __ or_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_AND) {
      __ and_(right, Operand(left));
      GenerateReturn(masm);
      return;
    } else if (op_ == Token::BIT_XOR) {
      __ xor_(right, Operand(left));
      GenerateReturn(masm);
      return;
    }
  }

  // 2. Prepare the smi check of both operands by or-ing them together.
  Comment smi_check_comment(masm, "-- Smi check arguments");
  Label not_smis;
  Register combined = ecx;
  ASSERT(!left.is(combined) && !right.is(combined));
  switch (op_) {
    case Token::BIT_OR:
      // Perform the operation into eax and smi check the result.  Preserve
      // eax in case the result is not a smi.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));  // Bitwise or is commutative.
      combined = right;
      break;

    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
      __ mov(combined, right);
      __ or_(combined, Operand(left));
      break;

    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Move the right operand into ecx for the shift operation, use eax
      // for the smi check register.
      ASSERT(!left.is(ecx) && !right.is(ecx));
      __ mov(ecx, right);
      __ or_(right, Operand(left));
      combined = right;
      break;

    default:
      break;
  }

  // 3. Perform the smi check of the operands.
  ASSERT(kSmiTag == 0);  // Adjust zero check if not the case.
  __ test(combined, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smis, not_taken);

  // 4. Operands are both smis, perform the operation leaving the result in
  // eax and check the result if necessary.
  Comment perform_smi(masm, "-- Perform smi operation");
  Label use_fp_on_smis;
  switch (op_) {
    case Token::BIT_OR:
      // Nothing to do.
      break;

    case Token::BIT_XOR:
      ASSERT(right.is(eax));
      __ xor_(right, Operand(left));  // Bitwise xor is commutative.
      break;

    case Token::BIT_AND:
      ASSERT(right.is(eax));
      __ and_(right, Operand(left));  // Bitwise and is commutative.
      break;

    case Token::SHL:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shl_cl(left);
      // Check that the *signed* result fits in a smi.
      __ cmp(left, 0xc0000000);
      __ j(sign, &use_fp_on_smis, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SAR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ sar_cl(left);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;

    case Token::SHR:
      // Remove tags from operands (but keep sign).
      __ SmiUntag(left);
      __ SmiUntag(ecx);
      // Perform the operation.
      __ shr_cl(left);
      // Check that the *unsigned* result fits in a smi.
      // Neither of the two high-order bits can be set:
      // - 0x80000000: the high bit would be lost when smi tagging.
      // - 0x40000000: this number would convert to a negative number when
      //   smi tagged.
      // These two cases can only happen with shifts by 0 or 1 when handed
      // a valid smi.
      __ test(left, Immediate(0xc0000000));
      __ j(not_zero, slow, not_taken);
      // Tag the result and store it in register eax.
      __ SmiTag(left);
      __ mov(eax, left);
      break;
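
      // Worked example for the guard above: the smi -1 untags to
      // 0xffffffff, and a logical shift right by 1 gives 0x7fffffff,
      // whose 0x40000000 bit is set.  Tagging that value would wrap to
      // 0xfffffffe, i.e. the smi -1, instead of the correct result
      // 2147483647, which is not representable as a smi; hence the jump
      // to the slow case.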

    case Token::ADD:
      ASSERT(right.is(eax));
      __ add(right, Operand(left));  // Addition is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      break;

    case Token::SUB:
      __ sub(left, Operand(right));
      __ j(overflow, &use_fp_on_smis, not_taken);
      __ mov(eax, left);
      break;

    case Token::MUL:
      // If the smi tag is 0 we can just leave the tag on one operand.
      ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
      // We can't revert the multiplication if the result is not a smi,
      // so save the right operand.
      __ mov(ebx, right);
      // Remove tag from one of the operands (but keep sign).
      __ SmiUntag(right);
      // Do multiplication.
      __ imul(right, Operand(left));  // Multiplication is commutative.
      __ j(overflow, &use_fp_on_smis, not_taken);
      // Check for negative zero result.  Use combined = left | right.
      __ NegativeZeroTest(right, combined, &use_fp_on_smis);
      break;

    case Token::DIV:
      // We can't revert the division if the result is not a smi,
      // so save the left operand.
      __ mov(edi, left);
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &use_fp_on_smis, not_taken);
      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for the corner case of dividing the most negative smi by
      // -1.  We cannot use the overflow flag, since it is not set by the
      // idiv instruction.
      ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      __ j(equal, &use_fp_on_smis);
      // Check for negative zero result.  Use combined = left | right.
      __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      __ j(not_zero, &use_fp_on_smis);
      // Tag the result and store it in register eax.
      __ SmiTag(eax);
      break;

    case Token::MOD:
      // Check for 0 divisor.
      __ test(right, Operand(right));
      __ j(zero, &not_smis, not_taken);

      // Sign extend left into edx:eax.
      ASSERT(left.is(eax));
      __ cdq();
      // Divide edx:eax by right.
      __ idiv(right);
      // Check for negative zero result.  Use combined = left | right.
      __ NegativeZeroTest(edx, combined, slow);
      // Move remainder to register eax.
      __ mov(eax, edx);
      break;

    default:
      UNREACHABLE();
  }

  // 5. Emit return of result in eax.
  GenerateReturn(masm);

  // 6. For some operations emit inline code to perform floating point
  // operations on known smis (e.g., if the result of the operation
  // overflowed the smi range).
  switch (op_) {
    case Token::SHL: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Result we want is in left == edx, so we can put the allocated heap
      // number in eax.
      __ AllocateHeapNumber(eax, ecx, ebx, slow);
      // Store the result in the HeapNumber and return.
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        __ cvtsi2sd(xmm0, Operand(left));
        __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
      } else {
        // It's OK to overwrite the right argument on the stack because we
        // are about to return.
        __ mov(Operand(esp, 1 * kPointerSize), left);
        __ fild_s(Operand(esp, 1 * kPointerSize));
        __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
      }
      GenerateReturn(masm);
      break;
    }

    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
    case Token::DIV: {
      Comment perform_float(masm, "-- Perform float operation on smis");
      __ bind(&use_fp_on_smis);
      // Restore arguments to edx, eax.
      switch (op_) {
        case Token::ADD:
          // Revert right = right + left.
          __ sub(right, Operand(left));
          break;
        case Token::SUB:
          // Revert left = left - right.
          __ add(left, Operand(right));
          break;
        case Token::MUL:
          // Right was clobbered but a copy is in ebx.
          __ mov(right, ebx);
          break;
        case Token::DIV:
          // Left was clobbered but a copy is in edi.  Right is in ebx for
          // division.
          __ mov(edx, edi);
          __ mov(eax, right);
          break;
        default: UNREACHABLE();
          break;
      }
      __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
      if (CpuFeatures::IsSupported(SSE2)) {
        CpuFeatures::Scope use_sse2(SSE2);
        FloatingPointHelper::LoadSSE2Smis(masm, ebx);
        switch (op_) {
          case Token::ADD: __ addsd(xmm0, xmm1); break;
          case Token::SUB: __ subsd(xmm0, xmm1); break;
          case Token::MUL: __ mulsd(xmm0, xmm1); break;
          case Token::DIV: __ divsd(xmm0, xmm1); break;
          default: UNREACHABLE();
        }
        __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
      } else {  // SSE2 not available, use FPU.
        FloatingPointHelper::LoadFloatSmis(masm, ebx);
        switch (op_) {
          case Token::ADD: __ faddp(1); break;
          case Token::SUB: __ fsubp(1); break;
          case Token::MUL: __ fmulp(1); break;
          case Token::DIV: __ fdivp(1); break;
          default: UNREACHABLE();
        }
        __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
      }
      __ mov(eax, ecx);
      GenerateReturn(masm);
      break;
    }

    default:
      break;
  }

  // 7. Non-smi operands, fall out to the non-smi code with the operands in
  // edx and eax.
  Comment done_comment(masm, "-- Enter non-smi code");
  __ bind(&not_smis);
  switch (op_) {
    case Token::BIT_OR:
    case Token::SHL:
    case Token::SAR:
    case Token::SHR:
      // Right operand is saved in ecx and eax was destroyed by the smi
      // check.
      __ mov(eax, ecx);
      break;

    case Token::DIV:
    case Token::MOD:
      // Operands are in eax, ebx at this point.
      __ mov(edx, eax);
      __ mov(eax, ebx);
      break;

    default:
      break;
  }
}
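

// Illustrative sketch only (not used by the build): the smi tag scheme the
// code above relies on, modeled on plain 32-bit ints with the ia32
// constants kSmiTag == 0 and kSmiTagSize == 1.
static inline int ModelSmiTag(int value) {
  return value << 1;  // The low (tag) bit becomes 0.
}
static inline int ModelSmiUntag(int tagged) {
  return tagged >> 1;  // Arithmetic shift keeps the sign.
}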
9637
9638
9639void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
9640 Label call_runtime;
9641
Steve Block3ce2e202009-11-05 08:53:23 +00009642 __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009643
Steve Block3ce2e202009-11-05 08:53:23 +00009644 // Generate fast case smi code if requested. This flag is set when the fast
9645 // case smi code is not generated by the caller. Generating it here will speed
9646 // up common operations.
Steve Block6ded16b2010-05-10 14:33:55 +01009647 if (ShouldGenerateSmiCode()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00009648 GenerateSmiCode(masm, &call_runtime);
9649 } else if (op_ != Token::MOD) { // MOD goes straight to runtime.
Steve Block6ded16b2010-05-10 14:33:55 +01009650 if (!HasArgsInRegisters()) {
9651 GenerateLoadArguments(masm);
9652 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009653 }
9654
Steve Blocka7e24c12009-10-30 11:49:00 +00009655 // Floating point case.
Steve Block6ded16b2010-05-10 14:33:55 +01009656 if (ShouldGenerateFPCode()) {
9657 switch (op_) {
9658 case Token::ADD:
9659 case Token::SUB:
9660 case Token::MUL:
9661 case Token::DIV: {
9662 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
9663 HasSmiCodeInStub()) {
9664 // Execution reaches this point when the first non-smi argument occurs
9665 // (and only if smi code is generated). This is the right moment to
9666 // patch to HEAP_NUMBERS state. The transition is attempted only for
9667 // the four basic operations. The stub stays in the DEFAULT state
9668 // forever for all other operations (also if smi code is skipped).
9669 GenerateTypeTransition(masm);
Andrei Popescu402d9372010-02-26 13:31:12 +00009670 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009671
Steve Block6ded16b2010-05-10 14:33:55 +01009672 Label not_floats;
Leon Clarkee46be812010-01-19 14:06:41 +00009673 if (CpuFeatures::IsSupported(SSE2)) {
9674 CpuFeatures::Scope use_sse2(SSE2);
Steve Block6ded16b2010-05-10 14:33:55 +01009675 if (static_operands_type_.IsNumber()) {
9676 if (FLAG_debug_code) {
9677 // Assert at runtime that inputs are only numbers.
9678 __ AbortIfNotNumber(edx);
9679 __ AbortIfNotNumber(eax);
9680 }
9681 if (static_operands_type_.IsSmi()) {
9682 if (FLAG_debug_code) {
9683 __ AbortIfNotSmi(edx);
9684 __ AbortIfNotSmi(eax);
9685 }
9686 FloatingPointHelper::LoadSSE2Smis(masm, ecx);
9687 } else {
9688 FloatingPointHelper::LoadSSE2Operands(masm);
9689 }
9690 } else {
9691 FloatingPointHelper::LoadSSE2Operands(masm, &call_runtime);
9692 }
9693
9694 switch (op_) {
9695 case Token::ADD: __ addsd(xmm0, xmm1); break;
9696 case Token::SUB: __ subsd(xmm0, xmm1); break;
9697 case Token::MUL: __ mulsd(xmm0, xmm1); break;
9698 case Token::DIV: __ divsd(xmm0, xmm1); break;
9699 default: UNREACHABLE();
9700 }
9701 GenerateHeapResultAllocation(masm, &call_runtime);
Leon Clarkee46be812010-01-19 14:06:41 +00009702 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
Steve Block6ded16b2010-05-10 14:33:55 +01009703 GenerateReturn(masm);
9704 } else { // SSE2 not available, use FPU.
9705 if (static_operands_type_.IsNumber()) {
9706 if (FLAG_debug_code) {
9707 // Assert at runtime that inputs are only numbers.
9708 __ AbortIfNotNumber(edx);
9709 __ AbortIfNotNumber(eax);
9710 }
9711 } else {
9712 FloatingPointHelper::CheckFloatOperands(masm, &call_runtime, ebx);
9713 }
9714 FloatingPointHelper::LoadFloatOperands(
9715 masm,
9716 ecx,
9717 FloatingPointHelper::ARGS_IN_REGISTERS);
9718 switch (op_) {
9719 case Token::ADD: __ faddp(1); break;
9720 case Token::SUB: __ fsubp(1); break;
9721 case Token::MUL: __ fmulp(1); break;
9722 case Token::DIV: __ fdivp(1); break;
9723 default: UNREACHABLE();
9724 }
9725 Label after_alloc_failure;
9726 GenerateHeapResultAllocation(masm, &after_alloc_failure);
Leon Clarkee46be812010-01-19 14:06:41 +00009727 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01009728 GenerateReturn(masm);
9729 __ bind(&after_alloc_failure);
9730 __ ffree();
9731 __ jmp(&call_runtime);
Leon Clarkee46be812010-01-19 14:06:41 +00009732 }
Steve Block6ded16b2010-05-10 14:33:55 +01009733 __ bind(&not_floats);
9734 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
9735 !HasSmiCodeInStub()) {
9736 // Execution reaches this point when the first non-number argument
9737 // occurs (and only if smi code is skipped from the stub, otherwise
9738 // the patching has already been done earlier in this case branch).
9739 // Try patching to STRINGS for ADD operation.
9740 if (op_ == Token::ADD) {
9741 GenerateTypeTransition(masm);
9742 }
9743 }
9744 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00009745 }
Steve Block6ded16b2010-05-10 14:33:55 +01009746 case Token::MOD: {
9747 // For MOD we go directly to runtime in the non-smi case.
9748 break;
9749 }
9750 case Token::BIT_OR:
9751 case Token::BIT_AND:
9752 case Token::BIT_XOR:
9753 case Token::SAR:
9754 case Token::SHL:
9755 case Token::SHR: {
9756 Label non_smi_result;
9757 FloatingPointHelper::LoadAsIntegers(masm,
9758 static_operands_type_,
9759 use_sse3_,
9760 &call_runtime);
9761 switch (op_) {
9762 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
9763 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
9764 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
9765 case Token::SAR: __ sar_cl(eax); break;
9766 case Token::SHL: __ shl_cl(eax); break;
9767 case Token::SHR: __ shr_cl(eax); break;
9768 default: UNREACHABLE();
9769 }
        if (op_ == Token::SHR) {
          // Check if result is non-negative and fits in a smi.
          __ test(eax, Immediate(0xc0000000));
          __ j(not_zero, &call_runtime);
        } else {
          // Check if result fits in a smi.
          __ cmp(eax, 0xc0000000);
          __ j(negative, &non_smi_result);
        }
        // Tag smi result and return.
        __ SmiTag(eax);
        GenerateReturn(masm);

        // All ops except SHR return a signed int32 that we load in
        // a HeapNumber.
        if (op_ != Token::SHR) {
          __ bind(&non_smi_result);
          // Allocate a heap number if needed.
          __ mov(ebx, Operand(eax));  // ebx: result
          Label skip_allocation;
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                  1 * kPointerSize : 2 * kPointerSize));
              __ test(eax, Immediate(kSmiTagMask));
              __ j(not_zero, &skip_allocation, not_taken);
              // Fall through!
            case NO_OVERWRITE:
              __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(ebx));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), ebx);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
          GenerateReturn(masm);
        }
        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If the arguments were passed in registers, place them on
  // the stack in the correct order below the return address.
  __ bind(&call_runtime);
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Test for string arguments before calling runtime.
      Label not_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in edx and eax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      // Registers containing left and right operands respectively.
      Register lhs, rhs;
      if (HasArgsReversed()) {
        lhs = eax;
        rhs = edx;
      } else {
        lhs = edx;
        rhs = eax;
      }

      // Test if the first argument is a string.
      __ test(lhs, Immediate(kSmiTagMask));
      __ j(zero, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_string1);

      // First argument is a string, test the second.
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &string1);

      // First and second argument are strings. Jump to the string add stub.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to look up the
      // number string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, edi, ebx, ecx, true, &string1);

      // Replace the second argument on the stack and tail call the string
      // add stub to make the result.
      __ mov(Operand(esp, 1 * kPointerSize), edi);
      __ TailCallStub(&string_add_stub);

      // Only the first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test the second.
      __ bind(&not_string1);
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_strings);

      // Only the second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                       Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  if (HasArgsReversed()) {
    if (mode == OVERWRITE_RIGHT) {
      mode = OVERWRITE_LEFT;
    } else if (mode == OVERWRITE_LEFT) {
      mode = OVERWRITE_RIGHT;
    }
  }
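  // A note on the swap above: with reversed arguments the left operand sits
  // in eax and the right operand in edx, so the overwrite mode is mirrored
  // to make the cases below target the register that actually holds the
  // overwritable operand.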
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten, losing one of the arguments, as we
      // are done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use the object in edx as the result holder.
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten, losing one of the arguments, as we
      // are done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  // If arguments are not passed in registers, read them from the stack.
  ASSERT(!HasArgsInRegisters());
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers, remove them from the stack
  // before returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(ecx);
  if (HasArgsReversed()) {
    __ push(eax);
    __ push(edx);
  } else {
    __ push(edx);
    __ push(eax);
  }
  __ push(ecx);
}


void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;

  // Keep a copy of operands on the stack and make sure they are also in
  // edx, eax.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  } else {
    GenerateLoadArguments(masm);
  }

  // Internal frame is necessary to handle exceptions properly.
  __ EnterInternalFrame();

  // Push arguments on stack if the stub expects them there.
  if (!HasArgsInRegisters()) {
    __ push(edx);
    __ push(eax);
  }
  // Call the stub proper to get the result in eax.
  __ call(&get_result);
  __ LeaveInternalFrame();

  __ pop(ecx);  // Return address.
  // Left and right arguments are now on top.
  // Push the operation result. The tail call to BinaryOp_Patch will
  // return it to the original caller.
  __ push(eax);
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(runtime_operands_type_)));

  __ push(ecx);  // Return address.
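  // Sketch of the stack as seen by the BinaryOp_Patch IC utility at this
  // point (six arguments, matching the tail call below): the return address
  // on top, then runtime_operands_type_, op_, the stub's minor key, the
  // computed result, and finally the right and left operands.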

  // Patch the caller to an appropriate specialized stub
  // and return the operation result.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      6,
      1);

  // The entry point for the result calculation is assumed to be immediately
  // after this sequence.
  __ bind(&get_result);
}


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // Input on stack:
  // esp[4]: argument (should be number).
  // esp[0]: return address.
  // Test that eax is a number.
  Label runtime_call;
  Label runtime_call_clear_stack;
  Label input_not_smi;
  Label loaded;
  __ mov(eax, Operand(esp, kPointerSize));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &input_not_smi);
  // Input is a smi. Untag and load it onto the FPU stack.
  // Then load the low and high words of the double into ebx, edx.
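  // The double image is obtained by going through memory: fild_s converts
  // the untagged integer on the FPU stack, fst_d writes its IEEE-754 bits
  // back to the two reserved stack slots (keeping the value in st(0)), and
  // the two pops pick up the 32-bit halves for the hash computation below.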
  ASSERT_EQ(1, kSmiTagSize);
  __ sar(eax, 1);
  __ sub(Operand(esp), Immediate(2 * kPointerSize));
  __ mov(Operand(esp, 0), eax);
  __ fild_s(Operand(esp, 0));
  __ fst_d(Operand(esp, 0));
  __ pop(edx);
  __ pop(ebx);
  __ jmp(&loaded);
  __ bind(&input_not_smi);
  // Check if input is a HeapNumber.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
  __ j(not_equal, &runtime_call);
  // Input is a HeapNumber. Push it on the FPU stack and load its
  // low and high words into ebx, edx.
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
  __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));

  __ bind(&loaded);
  // ST[0] == double value
  // ebx = low 32 bits of double value
  // edx = high 32 bits of double value
  // Compute hash:
  //   h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
  __ mov(ecx, ebx);
  __ xor_(ecx, Operand(edx));
  __ mov(eax, ecx);
  __ sar(eax, 16);
  __ xor_(ecx, Operand(eax));
  __ mov(eax, ecx);
  __ sar(eax, 8);
  __ xor_(ecx, Operand(eax));
  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
  __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
  // ST[0] == double value.
  // ebx = low 32 bits of double value.
  // edx = high 32 bits of double value.
  // ecx = TranscendentalCache::hash(double value).
  __ mov(eax,
         Immediate(ExternalReference::transcendental_cache_array_address()));
  // Eax points to the cache array.
  __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
  // Eax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ test(eax, Operand(eax));
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  { TranscendentalCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);  // Two uint_32's and a pointer.
    CHECK_EQ(0, elem_in0 - elem_start);
    CHECK_EQ(kIntSize, elem_in1 - elem_start);
    CHECK_EQ(2 * kIntSize, elem_out - elem_start);
  }
#endif
  // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
  __ lea(ecx, Operand(ecx, ecx, times_2, 0));
  __ lea(ecx, Operand(eax, ecx, times_4, 0));
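  // The pair of lea instructions computes ecx * 3 and then
  // eax + (ecx * 3) * 4, i.e. a 12-byte stride that matches the Element
  // layout verified in the DEBUG block above.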
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmp(ebx, Operand(ecx, 0));
  __ j(not_equal, &cache_miss);
  __ cmp(edx, Operand(ecx, kIntSize));
  __ j(not_equal, &cache_miss);
  // Cache hit!
  __ mov(eax, Operand(ecx, 2 * kIntSize));
  __ fstp(0);
  __ ret(kPointerSize);

  __ bind(&cache_miss);
  // Update cache with new value.
  // We are short on registers, so use no_reg as scratch.
  // This gives slightly larger code.
  __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  GenerateOperation(masm);
  __ mov(Operand(ecx, 0), ebx);
  __ mov(Operand(ecx, kIntSize), edx);
  __ mov(Operand(ecx, 2 * kIntSize), eax);
  __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ ret(kPointerSize);

  __ bind(&runtime_call_clear_stack);
  __ fstp(0);
  __ bind(&runtime_call);
  __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
  // Only free register is edi.
  Label done;
  ASSERT(type_ == TranscendentalCache::SIN ||
         type_ == TranscendentalCache::COS);
  // More transcendental types can be added later.

  // Both fsin and fcos require arguments in the range +/-2^63 and
  // return NaN for infinities and NaN. They can share all code except
  // the actual fsin/fcos operation.
  Label in_range;
  // If the argument is outside the range -2^63..2^63, fsin/fcos don't
  // work. We must reduce it to the appropriate range.
  __ mov(edi, edx);
  __ and_(Operand(edi), Immediate(0x7ff00000));  // Exponent only.
  int supported_exponent_limit =
      (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
  __ cmp(Operand(edi), Immediate(supported_exponent_limit));
  __ j(below, &in_range, taken);
  // Check for infinity and NaN. Both return NaN for sin.
  __ cmp(Operand(edi), Immediate(0x7ff00000));
  Label non_nan_result;
  __ j(not_equal, &non_nan_result, taken);
  // Input is +/-Infinity or NaN. Result is NaN.
  __ fstp(0);
  // NaN is represented by 0x7ff8000000000000.
  __ push(Immediate(0x7ff80000));
  __ push(Immediate(0));
  __ fld_d(Operand(esp, 0));
  __ add(Operand(esp), Immediate(2 * kPointerSize));
  __ jmp(&done);

  __ bind(&non_nan_result);

  // Use fpmod to restrict argument to the range +/-2*PI.
  __ mov(edi, eax);  // Save eax before using fnstsw_ax.
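  // Load pi and double it: fadd(0) adds st(0) to itself, so after the first
  // two instructions below st(0) holds 2*pi, and fld(1) then pushes a copy
  // of the input on top for the remainder loop.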
  __ fldpi();
  __ fadd(0);
  __ fld(1);
  // FPU Stack: input, 2*pi, input.
  {
    Label no_exceptions;
    __ fwait();
    __ fnstsw_ax();
    // Clear if Illegal Operand or Zero Division exceptions are set.
    __ test(Operand(eax), Immediate(5));
    __ j(zero, &no_exceptions);
    __ fnclex();
    __ bind(&no_exceptions);
  }

  // Compute st(0) % st(1).
  {
    Label partial_remainder_loop;
    __ bind(&partial_remainder_loop);
    __ fprem1();
    __ fwait();
    __ fnstsw_ax();
    __ test(Operand(eax), Immediate(0x400 /* C2 */));
    // If C2 is set, computation only has partial result. Loop to
    // continue computation.
    __ j(not_zero, &partial_remainder_loop);
  }
  // FPU Stack: input, 2*pi, input % 2*pi.
  __ fstp(2);
  __ fstp(0);
  __ mov(eax, edi);  // Restore eax (allocated HeapNumber pointer).

  // FPU Stack: input % 2*pi.
  __ bind(&in_range);
  switch (type_) {
    case TranscendentalCache::SIN:
      __ fsin();
      break;
    case TranscendentalCache::COS:
      __ fcos();
      break;
    default:
      UNREACHABLE();
  }
  __ bind(&done);
}


// Get the integer part of a heap number. Surprisingly, all this bit twiddling
// is faster than using the built-in instructions on floating point registers.
// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
// trashed registers.
void IntegerConvert(MacroAssembler* masm,
                    Register source,
                    TypeInfo type_info,
                    bool use_sse3,
                    Label* conversion_failure) {
  ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
  Label done, right_exponent, normal_exponent;
  Register scratch = ebx;
  Register scratch2 = edi;
  if (type_info.IsInteger32() && CpuFeatures::IsEnabled(SSE2)) {
    CpuFeatures::Scope scope(SSE2);
    __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
    return;
  }
  if (!type_info.IsInteger32() || !use_sse3) {
    // Get exponent word.
    __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
    // Get exponent alone in scratch2.
    __ mov(scratch2, scratch);
    __ and_(scratch2, HeapNumber::kExponentMask);
  }
  if (use_sse3) {
    CpuFeatures::Scope scope(SSE3);
    if (!type_info.IsInteger32()) {
      // Check whether the exponent is too big for a 64 bit signed integer.
      static const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
      __ j(greater_equal, conversion_failure);
    }
    // Load x87 register with heap number.
    __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
    // Reserve space for 64 bit answer.
    __ sub(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(ecx, Operand(esp, 0));  // Load low word of answer into ecx.
    __ add(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
  } else {
    // Load ecx with zero. We use this either for the final shift or
    // for the answer.
    __ xor_(ecx, Operand(ecx));
    // Check whether the exponent matches a 32 bit signed int that cannot be
    // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
    // exponent is 30 (biased). This is the exponent that we are fastest at
    // and also the highest exponent we can handle here.
    const uint32_t non_smi_exponent =
        (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
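    // For reference: with the IEEE-754 double bias of 1023 this constant is
    // (1023 + 30) << 20 == 0x41D00000 in the high word, the exponent of
    // values in [2^30, 2^31).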
    __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
    // If we have a match of the int32-but-not-Smi exponent then skip some
    // logic.
    __ j(equal, &right_exponent);
    // If the exponent is higher than that then go to slow case. This catches
    // numbers that don't fit in a signed int32, infinities and NaNs.
    __ j(less, &normal_exponent);

    {
      // Handle a big exponent. The only reason we have this code is that the
      // >>> operator has a tendency to generate numbers with an exponent of
      // 31.
      const uint32_t big_non_smi_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
      __ j(not_equal, conversion_failure);
      // We have the big exponent, typically from >>>. This means the number
      // is in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
      __ mov(scratch2, scratch);
      __ and_(scratch2, HeapNumber::kMantissaMask);
      // Put back the implicit 1.
      __ or_(scratch2, 1 << HeapNumber::kExponentShift);
      // Shift up the mantissa bits to take up the space the exponent used to
      // take. We just orred in the implicit bit so that took care of one and
      // we want to use the full unsigned range so we subtract 1 bit from the
      // shift distance.
      const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
      __ shl(scratch2, big_shift_distance);
      // Get the second half of the double.
      __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
      // Shift down 21 bits to get the most significant 11 bits of the low
      // mantissa word.
      __ shr(ecx, 32 - big_shift_distance);
      __ or_(ecx, Operand(scratch2));
      // We have the answer in ecx, but we may need to negate it.
      __ test(scratch, Operand(scratch));
      __ j(positive, &done);
      __ neg(ecx);
      __ jmp(&done);
    }

    __ bind(&normal_exponent);
    // Exponent word in scratch, exponent part of exponent word in scratch2.
    // Zero in ecx.
    // We know the exponent is smaller than 30 (biased). If it is less than
    // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0,
    // i.e. it rounds to zero.
    const uint32_t zero_exponent =
        (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
    __ sub(Operand(scratch2), Immediate(zero_exponent));
    // ecx already has a Smi zero.
    __ j(less, &done);

    // We have a shifted exponent between 0 and 30 in scratch2.
    __ shr(scratch2, HeapNumber::kExponentShift);
    __ mov(ecx, Immediate(30));
    __ sub(ecx, Operand(scratch2));

    __ bind(&right_exponent);
    // Here ecx is the shift, scratch is the exponent word.
    // Get the top bits of the mantissa.
    __ and_(scratch, HeapNumber::kMantissaMask);
    // Put back the implicit 1.
    __ or_(scratch, 1 << HeapNumber::kExponentShift);
    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We have kExponentShift + 1 significant bits in the low end of
    // the word. Shift them to the top bits.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    __ shl(scratch, shift_distance);
    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
    // Shift down 22 bits to get the most significant 10 bits of the low
    // mantissa word.
    __ shr(scratch2, 32 - shift_distance);
    __ or_(scratch2, Operand(scratch));
    // Move down according to the exponent.
    __ shr_cl(scratch2);
    // Now the unsigned answer is in scratch2. We need to move it to ecx and
    // we may need to fix the sign.
    Label negative;
    __ xor_(ecx, Operand(ecx));
    __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
    __ j(greater, &negative);
    __ mov(ecx, scratch2);
    __ jmp(&done);
    __ bind(&negative);
    __ sub(ecx, Operand(scratch2));
    __ bind(&done);
  }
}


// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are left and right integers for a bit op.
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm,
                                                TypeInfo type_info,
                                                bool use_sse3,
                                                Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  if (!type_info.IsDouble()) {
    if (!type_info.IsSmi()) {
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &arg1_is_object);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(edx);
    }
    __ SmiUntag(edx);
    __ jmp(&load_arg2);
  }

  __ bind(&arg1_is_object);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm, edx, type_info, use_sse3, conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);
  if (!type_info.IsDouble()) {
    // Test if arg2 is a Smi.
    if (!type_info.IsSmi()) {
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &arg2_is_object);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(eax);
    }
    __ SmiUntag(eax);
    __ mov(ecx, eax);
    __ jmp(&done);
  }

  __ bind(&arg2_is_object);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm, eax, type_info, use_sse3, conversion_failure);
  __ bind(&done);
  __ mov(eax, edx);
}


// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are left and right integers for a bit op.
void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
                                                 bool use_sse3,
                                                 Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  // Test if arg1 is a Smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &arg1_is_object);

  __ SmiUntag(edx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  __ cmp(edx, Factory::undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(edx, Immediate(0));
  __ jmp(&load_arg2);

  __ bind(&arg1_is_object);
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ebx, Factory::heap_number_map());
  __ j(not_equal, &check_undefined_arg1);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm,
                 edx,
                 TypeInfo::Unknown(),
                 use_sse3,
                 conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);

  // Test if arg2 is a Smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &arg2_is_object);

  __ SmiUntag(eax);
  __ mov(ecx, eax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ cmp(eax, Factory::undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(ecx, Immediate(0));
  __ jmp(&done);

  __ bind(&arg2_is_object);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(ebx, Factory::heap_number_map());
  __ j(not_equal, &check_undefined_arg2);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm,
                 eax,
                 TypeInfo::Unknown(),
                 use_sse3,
                 conversion_failure);
  __ bind(&done);
  __ mov(eax, edx);
}


void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         TypeInfo type_info,
                                         bool use_sse3,
                                         Label* conversion_failure) {
  if (type_info.IsNumber()) {
    LoadNumbersAsIntegers(masm, type_info, use_sse3, conversion_failure);
  } else {
    LoadUnknownsAsIntegers(masm, use_sse3, conversion_failure);
  }
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ test(number, Immediate(kSmiTagMask));
  __ j(zero, &load_smi, not_taken);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
  Label load_smi_edx, load_eax, load_smi_eax, done;
  // Load operand in edx into xmm0.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_edx, not_taken);  // Argument in edx is a smi.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  __ bind(&load_eax);
  // Load operand in eax into xmm1.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_eax, not_taken);  // Argument in eax is a smi.
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);

  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_edx, not_taken);  // Argument in edx is a smi.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_eax, not_taken);  // Argument in eax is a smi.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
  __ j(equal, &load_float_eax);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done);
  __ bind(&load_float_eax);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
                                       Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm0, Operand(scratch));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm1, Operand(scratch));
}


void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
                                            Register scratch,
                                            ArgLocation arg_location) {
  Label load_smi_1, load_smi_2, done_load_1, done;
  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, edx);
  } else {
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_1, not_taken);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ bind(&done_load_1);

  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, eax);
  } else {
    __ mov(scratch, Operand(esp, 1 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_2, not_taken);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_1);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
  __ jmp(&done_load_1);

  __ bind(&load_smi_2);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);

  __ bind(&done);
}


void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
                                        Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ mov(Operand(esp, 0), scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are numbers (smis or heap numbers); jump to
  // non_float if either of them is not.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &test_other, not_taken);  // Argument in edx is OK.
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(scratch, Factory::heap_number_map());
  __ j(not_equal, non_float);  // Argument in edx is not a number -> NaN.

  __ bind(&test_other);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &done);  // Argument in eax is OK.
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, Factory::heap_number_map());
  __ j(not_equal, non_float);  // Argument in eax is not a number -> NaN.

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  Label slow, done;

  if (op_ == Token::SUB) {
    // Check whether the value is a smi.
    Label try_float;
    __ test(eax, Immediate(kSmiTagMask));
    __ j(not_zero, &try_float, not_taken);

    // Go slow case if the value of the expression is zero
    // to make sure that we switch between 0 and -0.
    __ test(eax, Operand(eax));
    __ j(zero, &slow, not_taken);

    // The value of the expression is a smi that is not zero. Try
    // optimistic subtraction '0 - value'.
    Label undo;
    __ mov(edx, Operand(eax));
    __ Set(eax, Immediate(0));
    __ sub(eax, Operand(edx));
    __ j(overflow, &undo, not_taken);

    // If result is a smi we are done.
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &done, taken);

    // Restore eax and go slow case.
    __ bind(&undo);
    __ mov(eax, Operand(edx));
    __ jmp(&slow);

    // Try floating point case.
    __ bind(&try_float);
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ cmp(edx, Factory::heap_number_map());
    __ j(not_equal, &slow);
    if (overwrite_) {
      __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
      __ xor_(edx, HeapNumber::kSignMask);  // Flip sign.
      __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
    } else {
      __ mov(edx, Operand(eax));
      // edx: operand
      __ AllocateHeapNumber(eax, ebx, ecx, &undo);
      // eax: allocated 'empty' number
      __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
      __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
      __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
      __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
      __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
    }
  } else if (op_ == Token::BIT_NOT) {
    // Check if the operand is a heap number.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ cmp(edx, Factory::heap_number_map());
    __ j(not_equal, &slow, not_taken);

    // Convert the heap number in eax to an untagged integer in ecx.
    IntegerConvert(masm,
                   eax,
                   TypeInfo::Unknown(),
                   CpuFeatures::IsSupported(SSE3),
                   &slow);

    // Do the bitwise operation and check if the result fits in a smi.
    Label try_float;
    __ not_(ecx);
    __ cmp(ecx, 0xc0000000);
    __ j(sign, &try_float, not_taken);

    // Tag the result as a smi and we're done.
    ASSERT(kSmiTagSize == 1);
    __ lea(eax, Operand(ecx, times_2, kSmiTag));
    __ jmp(&done);

    // Try to store the result in a heap number.
    __ bind(&try_float);
    if (!overwrite_) {
      // Allocate a fresh heap number, but don't overwrite eax until
      // we're sure we can do it without going through the slow case
      // that needs the value in eax.
      __ AllocateHeapNumber(ebx, edx, edi, &slow);
      __ mov(eax, Operand(ebx));
    }
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope use_sse2(SSE2);
      __ cvtsi2sd(xmm0, Operand(ecx));
      __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
    } else {
      __ push(ecx);
      __ fild_s(Operand(esp, 0));
      __ pop(ecx);
      __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
    }
  } else {
    UNIMPLEMENTED();
  }

  // Return from the stub.
  __ bind(&done);
  __ StubReturn(1);

  // Handle the slow case by jumping to the JavaScript builtin.
  __ bind(&slow);
  __ pop(ecx);  // Pop return address.
  __ push(eax);
  __ push(ecx);  // Push return address.
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &slow, not_taken);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, Operand(eax));
  __ j(above_equal, &slow, not_taken);

  // Read the argument from the stack and return it.
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);  // Shifting code depends on this.
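  // Both eax (parameter count) and edx (key) are smis, i.e. value << 1, so
  // the times_2 scaling below turns them into byte offsets of
  // value * kPointerSize; negating edx indexes backwards from the last
  // parameter.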
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, Operand(ecx));
  __ j(above_equal, &slow, not_taken);

  // Read the argument from the stack and return it.
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);  // Shifting code depends on this.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, Operand(ecx));
  __ j(zero, &add_arguments_object);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));
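  // ecx now holds the total allocation size: the arguments object itself
  // plus, for a non-zero length, a FixedArray of 'length' pointers (the smi
  // length scaled by times_2 gives length * kPointerSize).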

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
  int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Setup the callee in-object property.
  ASSERT(Heap::arguments_callee_index == 0);
  __ mov(ebx, Operand(esp, 3 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);

  // Get the length (smi tagged) and set that as an in-object property too.
  ASSERT(Heap::arguments_length_index == 1);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, Operand(ecx));
  __ j(zero, &done);

  // Get the parameters pointer from the stack and untag the length.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ SmiUntag(ecx);

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(Operand(edi), Immediate(kPointerSize));
  __ sub(Operand(edx), Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to the runtime system if native RegExp support is not
  // selected at compile time, or if the entry to generated regexp code is
  // turned off by a runtime switch or during compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  // esp[0]: return address
  // esp[4]: last_match_info (expected JSArray)
  // esp[8]: previous index
  // esp[12]: subject string
  // esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime, invoke_regexp;

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address();
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size();
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, Operand(ebx));
  __ j(zero, &runtime, not_taken);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  ASSERT_EQ(0, kSmiTag);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2. This
  // uses the assumption that smis are 2 * their untagged value.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ add(Operand(edx), Immediate(2));  // edx was a smi.
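  // Worked example: a regexp with one capture stores number_of_captures as
  // smi 2 (value shifted left by one); adding 2 yields 4, the
  // (1 + 1) * 2 capture registers needed for the full match plus the one
  // capture.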
  // Check that the static offsets vector buffer is large enough.
  __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
  __ j(above, &runtime);

  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the second argument is a string.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);
  // Get the length of the string into ebx.
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));

  // ebx: Length of subject string as a smi
  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the third argument is a positive smi less than the subject
  // string length. A negative value will be greater (unsigned comparison).
  __ mov(eax, Operand(esp, kPreviousIndexOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ cmp(eax, Operand(ebx));
  __ j(above_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, Factory::fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and
  // the additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, Operand(eax));
  __ j(greater, &runtime);

11124 // ecx: RegExp data (FixedArray)
Leon Clarked91b9f72010-01-27 17:25:45 +000011125 // Check the representation and encoding of the subject string.
11126 Label seq_string, seq_two_byte_string, check_code;
11127 const int kStringRepresentationEncodingMask =
11128 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
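  // A sketch of the bits tested below, assuming the instance-type layout of
  // this era (illustrative values, see objects.h): kStringRepresentationMask
  // == 0x03 (seq 0, cons 1, external 2), kStringEncodingMask == 0x04 (set for
  // ascii), kIsNotStringMask == 0x80 (clear for all strings). Under the
  // combined mask a sequential ascii string reads 0x04 and a sequential
  // two-byte string reads 0x00.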
11129 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011130 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
11131 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +000011132 __ and_(ebx, kStringRepresentationEncodingMask);
11133 // First check for sequential string.
11134 ASSERT_EQ(0, kStringTag);
11135 ASSERT_EQ(0, kSeqStringTag);
11136 __ test(Operand(ebx),
11137 Immediate(kIsNotStringMask | kStringRepresentationMask));
11138 __ j(zero, &seq_string);
11139
11140 // Check for flat cons string.
11141 // A flat cons string is a cons string where the second part is the empty
11142 // string. In that case the subject string is just the first part of the cons
11143 // string. Also in this case the first part of the cons string is known to be
Leon Clarke4515c472010-02-03 11:58:03 +000011144 // a sequential string or an external string.
Steve Block6ded16b2010-05-10 14:33:55 +010011145 __ and_(ebx, kStringRepresentationMask);
11146 __ cmp(ebx, kConsStringTag);
Leon Clarkee46be812010-01-19 14:06:41 +000011147 __ j(not_equal, &runtime);
Leon Clarked91b9f72010-01-27 17:25:45 +000011148 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
Leon Clarke4515c472010-02-03 11:58:03 +000011149 __ cmp(Operand(edx), Factory::empty_string());
Leon Clarked91b9f72010-01-27 17:25:45 +000011150 __ j(not_equal, &runtime);
11151 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
11152 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
11153 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Leon Clarke4515c472010-02-03 11:58:03 +000011154 ASSERT_EQ(0, kSeqStringTag);
11155 __ test(ebx, Immediate(kStringRepresentationMask));
11156 __ j(not_zero, &runtime);
Leon Clarked91b9f72010-01-27 17:25:45 +000011157 __ and_(ebx, kStringRepresentationEncodingMask);
Leon Clarkee46be812010-01-19 14:06:41 +000011158
Leon Clarked91b9f72010-01-27 17:25:45 +000011159 __ bind(&seq_string);
11160 // eax: subject string (sequential, either ascii or two byte)
11161 // ebx: subject string type & kStringRepresentationEncodingMask
Leon Clarkee46be812010-01-19 14:06:41 +000011162 // ecx: RegExp data (FixedArray)
11163 // Check that the irregexp code has been generated for an ascii string. If
Leon Clarked91b9f72010-01-27 17:25:45 +000011164 // it has, the field contains a code object; otherwise it contains the hole.
Steve Block6ded16b2010-05-10 14:33:55 +010011165 const int kSeqTwoByteString = kStringTag | kSeqStringTag | kTwoByteStringTag;
11166 __ cmp(ebx, kSeqTwoByteString);
Leon Clarked91b9f72010-01-27 17:25:45 +000011167 __ j(equal, &seq_two_byte_string);
Leon Clarke4515c472010-02-03 11:58:03 +000011168 if (FLAG_debug_code) {
11169 __ cmp(ebx, kStringTag | kSeqStringTag | kAsciiStringTag);
11170 __ Check(equal, "Expected sequential ascii string");
11171 }
Leon Clarkee46be812010-01-19 14:06:41 +000011172 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +000011173 __ Set(edi, Immediate(1)); // Type is ascii.
11174 __ jmp(&check_code);
11175
11176 __ bind(&seq_two_byte_string);
11177 // eax: subject string
11178 // ecx: RegExp data (FixedArray)
11179 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
11180 __ Set(edi, Immediate(0)); // Type is two byte.
11181
11182 __ bind(&check_code);
Leon Clarke4515c472010-02-03 11:58:03 +000011183 // Check that the irregexp code has been generated for the actual string
11184 // encoding. If it has, the field contains a code object; otherwise it
11185 // contains the hole.
Leon Clarkee46be812010-01-19 14:06:41 +000011186 __ CmpObjectType(edx, CODE_TYPE, ebx);
11187 __ j(not_equal, &runtime);
11188
Leon Clarked91b9f72010-01-27 17:25:45 +000011189 // eax: subject string
11190 // edx: code
Leon Clarke4515c472010-02-03 11:58:03 +000011191 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
Leon Clarkee46be812010-01-19 14:06:41 +000011192 // Load used arguments before starting to push arguments for call to native
11193 // RegExp code to avoid handling changing stack height.
Leon Clarked91b9f72010-01-27 17:25:45 +000011194 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +000011195 __ SmiUntag(ebx); // Previous index from smi.
Leon Clarkee46be812010-01-19 14:06:41 +000011196
11197 // eax: subject string
11198 // ebx: previous index
11199 // edx: code
Leon Clarke4515c472010-02-03 11:58:03 +000011200 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
Leon Clarkee46be812010-01-19 14:06:41 +000011201 // All checks done. Now push arguments for native regexp code.
11202 __ IncrementCounter(&Counters::regexp_entry_native, 1);
11203
Steve Block6ded16b2010-05-10 14:33:55 +010011204 static const int kRegExpExecuteArguments = 7;
11205 __ PrepareCallCFunction(kRegExpExecuteArguments, ecx);
11206
Leon Clarked91b9f72010-01-27 17:25:45 +000011207 // Argument 7: Indicate that this is a direct call from JavaScript.
Steve Block6ded16b2010-05-10 14:33:55 +010011208 __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
Leon Clarkee46be812010-01-19 14:06:41 +000011209
Leon Clarked91b9f72010-01-27 17:25:45 +000011210 // Argument 6: Start (high end) of backtracking stack memory area.
Leon Clarkee46be812010-01-19 14:06:41 +000011211 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
11212 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
Steve Block6ded16b2010-05-10 14:33:55 +010011213 __ mov(Operand(esp, 5 * kPointerSize), ecx);
Leon Clarkee46be812010-01-19 14:06:41 +000011214
Leon Clarkee46be812010-01-19 14:06:41 +000011215 // Argument 5: static offsets vector buffer.
Steve Block6ded16b2010-05-10 14:33:55 +010011216 __ mov(Operand(esp, 4 * kPointerSize),
11217 Immediate(ExternalReference::address_of_static_offsets_vector()));
Leon Clarkee46be812010-01-19 14:06:41 +000011218
Leon Clarked91b9f72010-01-27 17:25:45 +000011219 // Argument 4: End of string data
11220 // Argument 3: Start of string data
Steve Block6ded16b2010-05-10 14:33:55 +010011221 Label setup_two_byte, setup_rest;
Leon Clarked91b9f72010-01-27 17:25:45 +000011222 __ test(edi, Operand(edi));
11223 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +010011224 __ j(zero, &setup_two_byte);
11225 __ SmiUntag(edi);
Leon Clarked91b9f72010-01-27 17:25:45 +000011226 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010011227 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
Leon Clarked91b9f72010-01-27 17:25:45 +000011228 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010011229 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
11230 __ jmp(&setup_rest);
Leon Clarkee46be812010-01-19 14:06:41 +000011231
Steve Block6ded16b2010-05-10 14:33:55 +010011232 __ bind(&setup_two_byte);
11233 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); // edi is a smi (2 * value).
11234 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
11235 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
Leon Clarked91b9f72010-01-27 17:25:45 +000011236 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
Steve Block6ded16b2010-05-10 14:33:55 +010011237 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
Leon Clarked91b9f72010-01-27 17:25:45 +000011238
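  // Addressing note for the two-byte case above (a sketch): the length in
  // edi is still a smi, i.e. 2 * the character count, and each character is
  // two bytes wide, so scaling the smi by times_1 already gives the byte
  // length; the untagged previous index in ebx needs times_2 instead.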
Steve Block6ded16b2010-05-10 14:33:55 +010011239 __ bind(&setup_rest);
Leon Clarkee46be812010-01-19 14:06:41 +000011240
11241 // Argument 2: Previous index.
Steve Block6ded16b2010-05-10 14:33:55 +010011242 __ mov(Operand(esp, 1 * kPointerSize), ebx);
Leon Clarkee46be812010-01-19 14:06:41 +000011243
11244 // Argument 1: Subject string.
Steve Block6ded16b2010-05-10 14:33:55 +010011245 __ mov(Operand(esp, 0 * kPointerSize), eax);
Leon Clarkee46be812010-01-19 14:06:41 +000011246
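  // Taken together, the seven slots filled above correspond to a C entry
  // point of roughly this shape (a hypothetical signature reconstructed from
  // the pushes; the real declaration lives with the native RegExp code):
  //   int Match(String* subject, int previous_index, const byte* start,
  //             const byte* end, int* offsets_vector, byte* stack_top,
  //             int direct_call);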
11247 // Locate the code entry and call it.
11248 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
Steve Block6ded16b2010-05-10 14:33:55 +010011249 __ CallCFunction(edx, kRegExpExecuteArguments);
Leon Clarkee46be812010-01-19 14:06:41 +000011250
11251 // Check the result.
11252 Label success;
11253 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
11254 __ j(equal, &success, taken);
11255 Label failure;
11256 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
11257 __ j(equal, &failure, taken);
11258 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
11259 // If not exception, it can only be retry. Handle that in the runtime system.
11260 __ j(not_equal, &runtime);
11261 // Result must now be exception. If there is no pending exception already, a
11262 // stack overflow (on the backtrack stack) was detected in RegExp code but
11263 // the exception has not been created yet. Handle that in the runtime system.
Steve Block6ded16b2010-05-10 14:33:55 +010011264 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
Leon Clarkee46be812010-01-19 14:06:41 +000011265 ExternalReference pending_exception(Top::k_pending_exception_address);
11266 __ mov(eax,
11267 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
11268 __ cmp(eax, Operand::StaticVariable(pending_exception));
11269 __ j(equal, &runtime);
11270 __ bind(&failure);
11271 // For failure and exception return null.
11272 __ mov(Operand(eax), Factory::null_value());
11273 __ ret(4 * kPointerSize);
11274
11275 // Load RegExp data.
11276 __ bind(&success);
Leon Clarked91b9f72010-01-27 17:25:45 +000011277 __ mov(eax, Operand(esp, kJSRegExpOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011278 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
11279 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
11280 // Calculate number of capture registers (number_of_captures + 1) * 2.
Leon Clarke4515c472010-02-03 11:58:03 +000011281 ASSERT_EQ(0, kSmiTag);
11282 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
Leon Clarkee46be812010-01-19 14:06:41 +000011283 __ add(Operand(edx), Immediate(2)); // edx was a smi.
11284
11285 // edx: Number of capture registers
11286 // Load last_match_info which is still known to be a fast case JSArray.
Leon Clarked91b9f72010-01-27 17:25:45 +000011287 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011288 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
11289
11290 // ebx: last_match_info backing store (FixedArray)
11291 // edx: number of capture registers
11292 // Store the capture count.
11293 __ SmiTag(edx); // Number of capture registers to smi.
11294 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
11295 __ SmiUntag(edx); // Number of capture registers back from smi.
11296 // Store last subject and last input.
Leon Clarked91b9f72010-01-27 17:25:45 +000011297 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011298 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
11299 __ mov(ecx, ebx);
11300 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
Leon Clarked91b9f72010-01-27 17:25:45 +000011301 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011302 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
11303 __ mov(ecx, ebx);
11304 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
11305
11306 // Get the static offsets vector filled by the native regexp code.
11307 ExternalReference address_of_static_offsets_vector =
11308 ExternalReference::address_of_static_offsets_vector();
11309 __ mov(ecx, Immediate(address_of_static_offsets_vector));
11310
11311 // ebx: last_match_info backing store (FixedArray)
11312 // ecx: offsets vector
11313 // edx: number of capture registers
11314 Label next_capture, done;
Leon Clarkee46be812010-01-19 14:06:41 +000011315 // Capture register counter starts from the number of capture registers and
11316 // counts down until wrapping after zero.
11317 __ bind(&next_capture);
11318 __ sub(Operand(edx), Immediate(1));
11319 __ j(negative, &done);
11320 // Read the value from the static offsets vector buffer.
Leon Clarke4515c472010-02-03 11:58:03 +000011321 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
Steve Block6ded16b2010-05-10 14:33:55 +010011322 __ SmiTag(edi);
Leon Clarkee46be812010-01-19 14:06:41 +000011323 // Store the smi value in the last match info.
11324 __ mov(FieldOperand(ebx,
11325 edx,
11326 times_pointer_size,
11327 RegExpImpl::kFirstCaptureOffset),
11328 edi);
11329 __ jmp(&next_capture);
11330 __ bind(&done);
11331
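  // The copy loop above is, in C terms (a sketch; the indexing is a gloss on
  // RegExpImpl::kFirstCaptureOffset):
  //   for (int i = register_count - 1; i >= 0; i--)
  //     last_match_info->set(first_capture_index + i,
  //                          Smi::FromInt(offsets_vector[i]));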
11332 // Return last match info.
Leon Clarked91b9f72010-01-27 17:25:45 +000011333 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +000011334 __ ret(4 * kPointerSize);
11335
11336 // Do the runtime call to execute the regexp.
11337 __ bind(&runtime);
Steve Block6ded16b2010-05-10 14:33:55 +010011338 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
11339#endif // V8_INTERPRETED_REGEXP
Leon Clarkee46be812010-01-19 14:06:41 +000011340}
11341
11342
Andrei Popescu402d9372010-02-26 13:31:12 +000011343void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
11344 Register object,
11345 Register result,
11346 Register scratch1,
11347 Register scratch2,
11348 bool object_is_smi,
11349 Label* not_found) {
Andrei Popescu402d9372010-02-26 13:31:12 +000011350 // Use of registers. Register result is used as a temporary.
11351 Register number_string_cache = result;
11352 Register mask = scratch1;
11353 Register scratch = scratch2;
11354
11355 // Load the number string cache.
11356 ExternalReference roots_address = ExternalReference::roots_address();
11357 __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
11358 __ mov(number_string_cache,
11359 Operand::StaticArray(scratch, times_pointer_size, roots_address));
11360 // Make the hash mask from the length of the number string cache. It
11361 // contains two elements (number and string) for each cache entry.
11362 __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
11363 __ shr(mask, 1); // Divide length by two (length is not a smi).
11364 __ sub(Operand(mask), Immediate(1)); // Make mask.
Steve Block6ded16b2010-05-10 14:33:55 +010011365
Andrei Popescu402d9372010-02-26 13:31:12 +000011366 // Calculate the entry in the number string cache. The hash value in the
Steve Block6ded16b2010-05-10 14:33:55 +010011367 // number string cache for smis is just the smi value, and the hash for
11368 // doubles is the xor of the upper and lower words. See
11369 // Heap::GetNumberStringCache.
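  // Hash sketch mirroring the description above (helper names here are
  // illustrative, not actual API):
  //   uint32_t hash = object->IsSmi()
  //       ? Smi::cast(object)->value()
  //       : low_word(double_value) ^ high_word(double_value);
  //   int entry = hash & mask;  // mask == cache_length / 2 - 1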
11370 Label smi_hash_calculated;
11371 Label load_result_from_cache;
11372 if (object_is_smi) {
11373 __ mov(scratch, object);
11374 __ SmiUntag(scratch);
11375 } else {
11376 Label not_smi, hash_calculated;
11377 ASSERT(kSmiTag == 0);
11378 __ test(object, Immediate(kSmiTagMask));
11379 __ j(not_zero, &not_smi);
11380 __ mov(scratch, object);
11381 __ SmiUntag(scratch);
11382 __ jmp(&smi_hash_calculated);
11383 __ bind(&not_smi);
11384 __ cmp(FieldOperand(object, HeapObject::kMapOffset),
11385 Factory::heap_number_map());
11386 __ j(not_equal, not_found);
11387 ASSERT_EQ(8, kDoubleSize);
11388 __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
11389 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
11390 // Object is heap number and hash is now in scratch. Calculate cache index.
11391 __ and_(scratch, Operand(mask));
11392 Register index = scratch;
11393 Register probe = mask;
11394 __ mov(probe,
11395 FieldOperand(number_string_cache,
11396 index,
11397 times_twice_pointer_size,
11398 FixedArray::kHeaderSize));
11399 __ test(probe, Immediate(kSmiTagMask));
11400 __ j(zero, not_found);
11401 if (CpuFeatures::IsSupported(SSE2)) {
11402 CpuFeatures::Scope fscope(SSE2);
11403 __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
11404 __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
11405 __ comisd(xmm0, xmm1);
11406 } else {
11407 __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
11408 __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
11409 __ FCmp();
11410 }
11411 __ j(parity_even, not_found); // Bail out if NaN is involved.
11412 __ j(not_equal, not_found); // The cache did not contain this value.
11413 __ jmp(&load_result_from_cache);
11414 }
11415
11416 __ bind(&smi_hash_calculated);
11417 // Object is smi and hash is now in scratch. Calculate cache index.
Andrei Popescu402d9372010-02-26 13:31:12 +000011418 __ and_(scratch, Operand(mask));
Steve Block6ded16b2010-05-10 14:33:55 +010011419 Register index = scratch;
Andrei Popescu402d9372010-02-26 13:31:12 +000011420 // Check if the entry is the smi we are looking for.
11421 __ cmp(object,
11422 FieldOperand(number_string_cache,
Steve Block6ded16b2010-05-10 14:33:55 +010011423 index,
Andrei Popescu402d9372010-02-26 13:31:12 +000011424 times_twice_pointer_size,
11425 FixedArray::kHeaderSize));
11426 __ j(not_equal, not_found);
11427
11428 // Get the result from the cache.
Steve Block6ded16b2010-05-10 14:33:55 +010011429 __ bind(&load_result_from_cache);
Andrei Popescu402d9372010-02-26 13:31:12 +000011430 __ mov(result,
11431 FieldOperand(number_string_cache,
Steve Block6ded16b2010-05-10 14:33:55 +010011432 index,
Andrei Popescu402d9372010-02-26 13:31:12 +000011433 times_twice_pointer_size,
11434 FixedArray::kHeaderSize + kPointerSize));
11435 __ IncrementCounter(&Counters::number_to_string_native, 1);
11436}
11437
11438
11439void NumberToStringStub::Generate(MacroAssembler* masm) {
11440 Label runtime;
11441
11442 __ mov(ebx, Operand(esp, kPointerSize));
11443
11444 // Generate code to lookup number in the number string cache.
11445 GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
11446 __ ret(1 * kPointerSize);
11447
11448 __ bind(&runtime);
11449 // Handle number to string in the runtime system if not found in the cache.
Steve Block6ded16b2010-05-10 14:33:55 +010011450 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
11451}
11452
11453
11454void RecordWriteStub::Generate(MacroAssembler* masm) {
11455 masm->RecordWriteHelper(object_, addr_, scratch_);
11456 masm->ret(0);
11457}
11458
11459
11460static int NegativeComparisonResult(Condition cc) {
11461 ASSERT(cc != equal);
11462 ASSERT((cc == less) || (cc == less_equal)
11463 || (cc == greater) || (cc == greater_equal));
11464 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
Andrei Popescu402d9372010-02-26 13:31:12 +000011465}
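// Usage gloss: the helper returns the result that makes the given condition
// evaluate to false. E.g. for cc == less it returns GREATER, so comparisons
// against NaN or undefined correctly come out false.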
11466
11467
Steve Blocka7e24c12009-10-30 11:49:00 +000011468void CompareStub::Generate(MacroAssembler* masm) {
11469 Label call_builtin, done;
11470
11471 // NOTICE! This code is only reached after a smi-fast-case check, so
11472 // it is certain that at least one operand isn't a smi.
11473
Steve Block6ded16b2010-05-10 14:33:55 +010011474 // Identical objects can be compared fast, but there are some tricky cases
11475 // for NaN and undefined.
11476 {
11477 Label not_identical;
11478 __ cmp(eax, Operand(edx));
11479 __ j(not_equal, &not_identical);
Steve Blocka7e24c12009-10-30 11:49:00 +000011480
Steve Block6ded16b2010-05-10 14:33:55 +010011481 if (cc_ != equal) {
11482 // Check for undefined. undefined OP undefined is false even though
11483 // undefined == undefined.
11484 Label check_for_nan;
11485 __ cmp(edx, Factory::undefined_value());
11486 __ j(not_equal, &check_for_nan);
11487 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
11488 __ ret(0);
11489 __ bind(&check_for_nan);
11490 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011491
Steve Block6ded16b2010-05-10 14:33:55 +010011492 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
11493 // so we do the second-best thing: test it ourselves.
11494 // Note: if cc_ != equal, never_nan_nan_ is not used.
11495 if (never_nan_nan_ && (cc_ == equal)) {
11496 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
11497 __ ret(0);
11498 } else {
11499 Label return_equal;
11500 Label heap_number;
11501 // If it's not a heap number, then return equal.
11502 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
11503 Immediate(Factory::heap_number_map()));
11504 __ j(equal, &heap_number);
11505 __ bind(&return_equal);
11506 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
11507 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +000011508
Steve Block6ded16b2010-05-10 14:33:55 +010011509 __ bind(&heap_number);
11510 // It is a heap number, so return non-equal if it's NaN and equal if
11511 // it's not NaN.
11512 // The representation of NaN values has all exponent bits (52..62) set,
11513 // and not all mantissa bits (0..51) clear.
11514 // We only accept QNaNs, which have bit 51 set.
11515 // Read top bits of double representation (second word of value).
11516
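      // For reference, the IEEE 754 double layout assumed here: bit 63 is
      // the sign, bits 62..52 the exponent, bits 51..0 the mantissa. A NaN
      // has all exponent bits set and a non-zero mantissa; a quiet NaN also
      // has mantissa bit 51 set, e.g. the canonical 0x7FF8000000000000
      // (high word 0x7FF80000).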
11517 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
11518 // all bits in the mask are set. We only need to check the word
11519 // that contains the exponent and high bit of the mantissa.
11520 ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u);
11521 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
11522 __ xor_(eax, Operand(eax));
11523 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
11524 // bits.
11525 __ add(edx, Operand(edx));
11526 __ cmp(edx, kQuietNaNHighBitsMask << 1);
11527 if (cc_ == equal) {
11528 ASSERT_NE(1, EQUAL);
Leon Clarkee46be812010-01-19 14:06:41 +000011529 __ setcc(above_equal, eax);
11530 __ ret(0);
Steve Block6ded16b2010-05-10 14:33:55 +010011531 } else {
11532 Label nan;
11533 __ j(above_equal, &nan);
11534 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
11535 __ ret(0);
11536 __ bind(&nan);
11537 __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
11538 __ ret(0);
Leon Clarkee46be812010-01-19 14:06:41 +000011539 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011540 }
11541
Steve Block6ded16b2010-05-10 14:33:55 +010011542 __ bind(&not_identical);
11543 }
11544
11545 if (cc_ == equal) { // Both strict and non-strict.
11546 Label slow; // Fallthrough label.
11547
Steve Blocka7e24c12009-10-30 11:49:00 +000011548 // If we're doing a strict equality comparison, we don't have to do
11549 // type conversion, so we generate code to do fast comparison for objects
11550 // and oddballs. Non-smi numbers and strings still go through the usual
11551 // slow-case code.
11552 if (strict_) {
11553 // If either is a Smi (we know that not both are), then they can only
11554 // be equal if the other is a HeapNumber. If so, use the slow case.
11555 {
11556 Label not_smis;
11557 ASSERT_EQ(0, kSmiTag);
11558 ASSERT_EQ(0, Smi::FromInt(0));
11559 __ mov(ecx, Immediate(kSmiTagMask));
11560 __ and_(ecx, Operand(eax));
11561 __ test(ecx, Operand(edx));
11562 __ j(not_zero, &not_smis);
11563 // One operand is a smi.
11564
11565 // Check whether the non-smi is a heap number.
11566 ASSERT_EQ(1, kSmiTagMask);
11567 // ecx still holds eax & kSmiTag, which is either zero or one.
11568 __ sub(Operand(ecx), Immediate(0x01));
11569 __ mov(ebx, edx);
11570 __ xor_(ebx, Operand(eax));
11571 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
11572 __ xor_(ebx, Operand(eax));
11573 // if eax was smi, ebx is now edx, else eax.
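        // Worked derivation of the branch-free select above (a sketch):
        //   ecx = (eax & kSmiTagMask) - 1   -> ~0 if eax is a smi, else 0
        //   ebx = ((edx ^ eax) & ecx) ^ eax -> edx if eax is the smi, else eax
        // so ebx holds the operand that can still be a heap number.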
11574
11575 // Check if the non-smi operand is a heap number.
11576 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
11577 Immediate(Factory::heap_number_map()));
11578 // If heap number, handle it in the slow case.
11579 __ j(equal, &slow);
11580 // Return non-equal (ebx is not zero)
11581 __ mov(eax, ebx);
11582 __ ret(0);
11583
11584 __ bind(&not_smis);
11585 }
11586
11587 // If either operand is a JSObject or an oddball value, then they are not
11588 // equal, since their pointers are different.
11589 // There is no test for undetectability in strict equality.
11590
11591 // Get the type of the first operand.
11592 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
11593 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
11594
11595 // If the first object is a JS object, we have done pointer comparison.
11596 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
11597 Label first_non_object;
11598 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
11599 __ j(less, &first_non_object);
11600
11601 // Return non-zero (eax is not zero)
11602 Label return_not_equal;
11603 ASSERT(kHeapObjectTag != 0);
11604 __ bind(&return_not_equal);
11605 __ ret(0);
11606
11607 __ bind(&first_non_object);
11608 // Check for oddballs: true, false, null, undefined.
11609 __ cmp(ecx, ODDBALL_TYPE);
11610 __ j(equal, &return_not_equal);
11611
11612 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
11613 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
11614
11615 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
11616 __ j(greater_equal, &return_not_equal);
11617
11618 // Check for oddballs: true, false, null, undefined.
11619 __ cmp(ecx, ODDBALL_TYPE);
11620 __ j(equal, &return_not_equal);
11621
11622 // Fall through to the general case.
11623 }
11624 __ bind(&slow);
11625 }
11626
11627 // Push arguments below the return address.
11628 __ pop(ecx);
11629 __ push(eax);
11630 __ push(edx);
11631 __ push(ecx);
11632
Steve Block6ded16b2010-05-10 14:33:55 +010011633 // Generate the number comparison code.
11634 if (include_number_compare_) {
11635 Label non_number_comparison;
11636 Label unordered;
11637 if (CpuFeatures::IsSupported(SSE2)) {
11638 CpuFeatures::Scope use_sse2(SSE2);
11639 CpuFeatures::Scope use_cmov(CMOV);
Steve Blocka7e24c12009-10-30 11:49:00 +000011640
Steve Block6ded16b2010-05-10 14:33:55 +010011641 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
11642 __ comisd(xmm0, xmm1);
Steve Blocka7e24c12009-10-30 11:49:00 +000011643
Steve Block6ded16b2010-05-10 14:33:55 +010011644 // Don't base result on EFLAGS when a NaN is involved.
11645 __ j(parity_even, &unordered, not_taken);
11646 // Return a result of -1, 0, or 1, based on EFLAGS.
11647 __ mov(eax, 0); // equal
11648 __ mov(ecx, Immediate(Smi::FromInt(1)));
11649 __ cmov(above, eax, Operand(ecx));
11650 __ mov(ecx, Immediate(Smi::FromInt(-1)));
11651 __ cmov(below, eax, Operand(ecx));
11652 __ ret(2 * kPointerSize);
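      // In C terms the cmov sequence computes (a sketch):
      //   eax = above ? Smi::FromInt(1) : below ? Smi::FromInt(-1) : 0;
      // i.e. 1 for greater, -1 for less, 0 for equal, as callers expect.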
11653 } else {
11654 FloatingPointHelper::CheckFloatOperands(
11655 masm, &non_number_comparison, ebx);
11656 FloatingPointHelper::LoadFloatOperands(masm, ecx);
11657 __ FCmp();
Steve Blocka7e24c12009-10-30 11:49:00 +000011658
Steve Block6ded16b2010-05-10 14:33:55 +010011659 // Don't base result on EFLAGS when a NaN is involved.
11660 __ j(parity_even, &unordered, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +000011661
Steve Block6ded16b2010-05-10 14:33:55 +010011662 Label below_label, above_label;
11663 // Return a result of -1, 0, or 1, based on EFLAGS. In all cases remove
11664 // two arguments from the stack as they have been pushed in preparation
11665 // of a possible runtime call.
11666 __ j(below, &below_label, not_taken);
11667 __ j(above, &above_label, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +000011668
Steve Block6ded16b2010-05-10 14:33:55 +010011669 __ xor_(eax, Operand(eax));
11670 __ ret(2 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000011671
Steve Block6ded16b2010-05-10 14:33:55 +010011672 __ bind(&below_label);
11673 __ mov(eax, Immediate(Smi::FromInt(-1)));
11674 __ ret(2 * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +000011675
Steve Block6ded16b2010-05-10 14:33:55 +010011676 __ bind(&above_label);
11677 __ mov(eax, Immediate(Smi::FromInt(1)));
11678 __ ret(2 * kPointerSize);
11679 }
11680
11681 // If one of the numbers was NaN, then the result is always false.
11682 // The cc is never not-equal.
11683 __ bind(&unordered);
11684 ASSERT(cc_ != not_equal);
11685 if (cc_ == less || cc_ == less_equal) {
11686 __ mov(eax, Immediate(Smi::FromInt(1)));
11687 } else {
11688 __ mov(eax, Immediate(Smi::FromInt(-1)));
11689 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011690 __ ret(2 * kPointerSize); // eax, edx were pushed
Steve Block6ded16b2010-05-10 14:33:55 +010011691
11692 // The number comparison code did not provide a valid result.
11693 __ bind(&non_number_comparison);
Steve Blocka7e24c12009-10-30 11:49:00 +000011694 }
Steve Blocka7e24c12009-10-30 11:49:00 +000011695
11696 // Fast negative check for symbol-to-symbol equality.
Leon Clarkee46be812010-01-19 14:06:41 +000011697 Label check_for_strings;
Steve Blocka7e24c12009-10-30 11:49:00 +000011698 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +000011699 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
11700 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +000011701
11702 // We've already checked for object identity, so if both operands
11703 // are symbols they aren't equal. Register eax already holds a
11704 // non-zero value, which indicates not equal, so just return.
11705 __ ret(2 * kPointerSize);
11706 }
11707
Leon Clarkee46be812010-01-19 14:06:41 +000011708 __ bind(&check_for_strings);
11709
Leon Clarked91b9f72010-01-27 17:25:45 +000011710 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &call_builtin);
Leon Clarkee46be812010-01-19 14:06:41 +000011711
11712 // Inline comparison of ascii strings.
11713 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
11714 edx,
11715 eax,
11716 ecx,
11717 ebx,
11718 edi);
11719#ifdef DEBUG
11720 __ Abort("Unexpected fall-through from string comparison");
11721#endif
11722
Steve Blocka7e24c12009-10-30 11:49:00 +000011723 __ bind(&call_builtin);
11724 // Must swap the argument order.
11725 __ pop(ecx);
11726 __ pop(edx);
11727 __ pop(eax);
11728 __ push(edx);
11729 __ push(eax);
11730
11731 // Figure out which native to call and setup the arguments.
11732 Builtins::JavaScript builtin;
11733 if (cc_ == equal) {
11734 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
11735 } else {
11736 builtin = Builtins::COMPARE;
Steve Block6ded16b2010-05-10 14:33:55 +010011737 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
Steve Blocka7e24c12009-10-30 11:49:00 +000011738 }
11739
11740 // Restore return address on the stack.
11741 __ push(ecx);
11742
11743 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
11744 // tagged as a small integer.
11745 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
11746}
11747
11748
11749void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
11750 Label* label,
11751 Register object,
11752 Register scratch) {
11753 __ test(object, Immediate(kSmiTagMask));
11754 __ j(zero, label);
11755 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
11756 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
11757 __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
11758 __ cmp(scratch, kSymbolTag | kStringTag);
11759 __ j(not_equal, label);
11760}
11761
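// Equivalent predicate in C++ for the bit test emitted above (a minimal
// sketch, assuming the instance-type masks from objects.h):
static inline bool IsSymbolInstanceType(uint32_t instance_type) {
  // A symbol must both be a string and carry the symbol bit.
  return (instance_type & (kIsSymbolMask | kIsNotStringMask)) ==
         (kSymbolTag | kStringTag);
}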
11762
11763void StackCheckStub::Generate(MacroAssembler* masm) {
11764 // Because builtins always remove the receiver from the stack, we
11765 // have to fake one to avoid underflowing the stack. The receiver
11766 // must be inserted below the return address on the stack so we
11767 // temporarily store that in a register.
11768 __ pop(eax);
11769 __ push(Immediate(Smi::FromInt(0)));
11770 __ push(eax);
11771
11772 // Do tail-call to runtime routine.
Steve Block6ded16b2010-05-10 14:33:55 +010011773 __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +000011774}
11775
11776
11777void CallFunctionStub::Generate(MacroAssembler* masm) {
11778 Label slow;
11779
Leon Clarkee46be812010-01-19 14:06:41 +000011780 // If the receiver might be a value (string, number or boolean), check for this
11781 // and box it if it is.
11782 if (ReceiverMightBeValue()) {
11783 // Get the receiver from the stack.
11784 // +1 ~ return address
11785 Label receiver_is_value, receiver_is_js_object;
11786 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
11787
11788 // Check if receiver is a smi (which is a number value).
11789 __ test(eax, Immediate(kSmiTagMask));
11790 __ j(zero, &receiver_is_value, not_taken);
11791
11792 // Check if the receiver is a valid JS object.
11793 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
11794 __ j(above_equal, &receiver_is_js_object);
11795
11796 // Call the runtime to box the value.
11797 __ bind(&receiver_is_value);
11798 __ EnterInternalFrame();
11799 __ push(eax);
11800 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
11801 __ LeaveInternalFrame();
11802 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
11803
11804 __ bind(&receiver_is_js_object);
11805 }
11806
Steve Blocka7e24c12009-10-30 11:49:00 +000011807 // Get the function to call from the stack.
11808 // +2 ~ receiver, return address
11809 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
11810
11811 // Check that the function really is a JavaScript function.
11812 __ test(edi, Immediate(kSmiTagMask));
11813 __ j(zero, &slow, not_taken);
11814 // Goto slow case if we do not have a function.
11815 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
11816 __ j(not_equal, &slow, not_taken);
11817
11818 // Fast-case: Just invoke the function.
11819 ParameterCount actual(argc_);
11820 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
11821
11822 // Slow-case: Non-function called.
11823 __ bind(&slow);
Andrei Popescu402d9372010-02-26 13:31:12 +000011824 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
11825 // of the original receiver from the call site).
11826 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
Steve Blocka7e24c12009-10-30 11:49:00 +000011827 __ Set(eax, Immediate(argc_));
11828 __ Set(ebx, Immediate(0));
11829 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
11830 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
11831 __ jmp(adaptor, RelocInfo::CODE_TARGET);
11832}
11833
11834
Steve Blocka7e24c12009-10-30 11:49:00 +000011835void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
11836 // eax holds the exception.
11837
11838 // Adjust this code if not the case.
11839 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
11840
11841 // Drop the sp to the top of the handler.
11842 ExternalReference handler_address(Top::k_handler_address);
11843 __ mov(esp, Operand::StaticVariable(handler_address));
11844
11845 // Restore next handler and frame pointer, discard handler state.
11846 ASSERT(StackHandlerConstants::kNextOffset == 0);
11847 __ pop(Operand::StaticVariable(handler_address));
11848 ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
11849 __ pop(ebp);
11850 __ pop(edx); // Remove state.
11851
11852 // Before returning we restore the context from the frame pointer if
11853 // not NULL. The frame pointer is NULL in the exception handler of
11854 // a JS entry frame.
11855 __ xor_(esi, Operand(esi)); // Tentatively set context pointer to NULL.
11856 Label skip;
11857 __ cmp(ebp, 0);
11858 __ j(equal, &skip, not_taken);
11859 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
11860 __ bind(&skip);
11861
11862 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
11863 __ ret(0);
11864}
11865
11866
Steve Blockd0582a62009-12-15 09:54:21 +000011867// If true, a Handle<T> passed by value is passed and returned by
11868// using the location_ field directly. If false, it is passed and
11869// returned as a pointer to a handle.
Steve Block6ded16b2010-05-10 14:33:55 +010011870#ifdef USING_BSD_ABI
Steve Blockd0582a62009-12-15 09:54:21 +000011871static const bool kPassHandlesDirectly = true;
11872#else
11873static const bool kPassHandlesDirectly = false;
11874#endif
11875
11876
11877void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
11878 Label get_result;
11879 Label prologue;
11880 Label promote_scheduled_exception;
11881 __ EnterApiExitFrame(ExitFrame::MODE_NORMAL, kStackSpace, kArgc);
11882 ASSERT_EQ(kArgc, 4);
11883 if (kPassHandlesDirectly) {
11884 // When handles as passed directly we don't have to allocate extra
11885 // space for and pass an out parameter.
11886 __ mov(Operand(esp, 0 * kPointerSize), ebx); // name.
11887 __ mov(Operand(esp, 1 * kPointerSize), eax); // arguments pointer.
11888 } else {
11889 // The function expects three arguments to be passed but we allocate
11890 // four to get space for the output cell. The argument slots are filled
11891 // as follows:
11892 //
11893 // 3: output cell
11894 // 2: arguments pointer
11895 // 1: name
11896 // 0: pointer to the output cell
11897 //
11898 // Note that this is one more "argument" than the function expects
11899 // so the out cell will have to be popped explicitly after returning
11900 // from the function.
11901 __ mov(Operand(esp, 1 * kPointerSize), ebx); // name.
11902 __ mov(Operand(esp, 2 * kPointerSize), eax); // arguments pointer.
11903 __ mov(ebx, esp);
11904 __ add(Operand(ebx), Immediate(3 * kPointerSize));
11905 __ mov(Operand(esp, 0 * kPointerSize), ebx); // output
11906 __ mov(Operand(esp, 3 * kPointerSize), Immediate(0)); // out cell.
11907 }
11908 // Call the api function!
11909 __ call(fun()->address(), RelocInfo::RUNTIME_ENTRY);
11910 // Check if the function scheduled an exception.
11911 ExternalReference scheduled_exception_address =
11912 ExternalReference::scheduled_exception_address();
11913 __ cmp(Operand::StaticVariable(scheduled_exception_address),
11914 Immediate(Factory::the_hole_value()));
11915 __ j(not_equal, &promote_scheduled_exception, not_taken);
11916 if (!kPassHandlesDirectly) {
11917 // The returned value is a pointer to the handle holding the result.
11918 // Dereference this to get to the location.
11919 __ mov(eax, Operand(eax, 0));
11920 }
11921 // Check if the result handle holds 0
11922 __ test(eax, Operand(eax));
11923 __ j(not_zero, &get_result, taken);
11924 // It was zero; the result is undefined.
11925 __ mov(eax, Factory::undefined_value());
11926 __ jmp(&prologue);
11927 // It was non-zero. Dereference to get the result value.
11928 __ bind(&get_result);
11929 __ mov(eax, Operand(eax, 0));
11930 __ bind(&prologue);
11931 __ LeaveExitFrame(ExitFrame::MODE_NORMAL);
11932 __ ret(0);
11933 __ bind(&promote_scheduled_exception);
Steve Block6ded16b2010-05-10 14:33:55 +010011934 __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
Steve Blockd0582a62009-12-15 09:54:21 +000011935}
11936
11937
Steve Blocka7e24c12009-10-30 11:49:00 +000011938void CEntryStub::GenerateCore(MacroAssembler* masm,
11939 Label* throw_normal_exception,
11940 Label* throw_termination_exception,
11941 Label* throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000011942 bool do_gc,
Steve Block6ded16b2010-05-10 14:33:55 +010011943 bool always_allocate_scope,
11944 int /* alignment_skew */) {
Steve Blocka7e24c12009-10-30 11:49:00 +000011945 // eax: result parameter for PerformGC, if any
11946 // ebx: pointer to C function (C callee-saved)
11947 // ebp: frame pointer (restored after C call)
11948 // esp: stack pointer (restored after C call)
11949 // edi: number of arguments including receiver (C callee-saved)
11950 // esi: pointer to the first argument (C callee-saved)
11951
Leon Clarke4515c472010-02-03 11:58:03 +000011952 // Result returned in eax, or eax+edx if result_size_ is 2.
11953
Steve Block6ded16b2010-05-10 14:33:55 +010011954 // Check stack alignment.
11955 if (FLAG_debug_code) {
11956 __ CheckStackAlignment();
11957 }
11958
Steve Blocka7e24c12009-10-30 11:49:00 +000011959 if (do_gc) {
Steve Block6ded16b2010-05-10 14:33:55 +010011960 // Pass failure code returned from last attempt as first argument to
11961 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
11962 // stack alignment is known to be correct. This function takes one argument
11963 // which is passed on the stack, and we know that the stack has been
11964 // prepared to pass at least one argument.
Steve Blocka7e24c12009-10-30 11:49:00 +000011965 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
11966 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
11967 }
11968
11969 ExternalReference scope_depth =
11970 ExternalReference::heap_always_allocate_scope_depth();
11971 if (always_allocate_scope) {
11972 __ inc(Operand::StaticVariable(scope_depth));
11973 }
11974
11975 // Call C function.
11976 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
11977 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
11978 __ call(Operand(ebx));
11979 // Result is in eax or edx:eax - do not destroy these registers!
11980
11981 if (always_allocate_scope) {
11982 __ dec(Operand::StaticVariable(scope_depth));
11983 }
11984
11985 // Make sure we're not trying to return 'the hole' from the runtime
11986 // call as this may lead to crashes in the IC code later.
11987 if (FLAG_debug_code) {
11988 Label okay;
11989 __ cmp(eax, Factory::the_hole_value());
11990 __ j(not_equal, &okay);
11991 __ int3();
11992 __ bind(&okay);
11993 }
11994
11995 // Check for failure result.
11996 Label failure_returned;
11997 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
11998 __ lea(ecx, Operand(eax, 1));
11999 // Lower 2 bits of ecx are 0 iff eax has failure tag.
12000 __ test(ecx, Immediate(kFailureTagMask));
12001 __ j(zero, &failure_returned, not_taken);
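  // Why this detects failures (a sketch, assuming the tag values of this
  // era): failure objects carry kFailureTag == 3 (binary 11) and
  // kFailureTagMask == 3, so for a failure eax ends in bits 11 and eax + 1
  // has both low bits clear.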
12002
12003 // Exit the JavaScript to C++ exit frame.
Leon Clarke4515c472010-02-03 11:58:03 +000012004 __ LeaveExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +000012005 __ ret(0);
12006
12007 // Handling of failure.
12008 __ bind(&failure_returned);
12009
12010 Label retry;
12011 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
12012 ASSERT(Failure::RETRY_AFTER_GC == 0);
12013 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
12014 __ j(zero, &retry, taken);
12015
12016 // Special handling of out of memory exceptions.
12017 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
12018 __ j(equal, throw_out_of_memory_exception);
12019
12020 // Retrieve the pending exception and clear the variable.
12021 ExternalReference pending_exception_address(Top::k_pending_exception_address);
12022 __ mov(eax, Operand::StaticVariable(pending_exception_address));
12023 __ mov(edx,
12024 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
12025 __ mov(Operand::StaticVariable(pending_exception_address), edx);
12026
12027 // Special handling of termination exceptions, which are uncatchable
12028 // by JavaScript code.
12029 __ cmp(eax, Factory::termination_exception());
12030 __ j(equal, throw_termination_exception);
12031
12032 // Handle normal exception.
12033 __ jmp(throw_normal_exception);
12034
12035 // Retry.
12036 __ bind(&retry);
12037}
12038
12039
12040void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
12041 UncatchableExceptionType type) {
12042 // Adjust this code if not the case.
12043 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
12044
12045 // Drop sp to the top stack handler.
12046 ExternalReference handler_address(Top::k_handler_address);
12047 __ mov(esp, Operand::StaticVariable(handler_address));
12048
12049 // Unwind the handlers until the ENTRY handler is found.
12050 Label loop, done;
12051 __ bind(&loop);
12052 // Load the type of the current stack handler.
12053 const int kStateOffset = StackHandlerConstants::kStateOffset;
12054 __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
12055 __ j(equal, &done);
12056 // Fetch the next handler in the list.
12057 const int kNextOffset = StackHandlerConstants::kNextOffset;
12058 __ mov(esp, Operand(esp, kNextOffset));
12059 __ jmp(&loop);
12060 __ bind(&done);
12061
12062 // Set the top handler address to the next handler past the current ENTRY handler.
12063 ASSERT(StackHandlerConstants::kNextOffset == 0);
12064 __ pop(Operand::StaticVariable(handler_address));
12065
12066 if (type == OUT_OF_MEMORY) {
12067 // Set external caught exception to false.
12068 ExternalReference external_caught(Top::k_external_caught_exception_address);
12069 __ mov(eax, false);
12070 __ mov(Operand::StaticVariable(external_caught), eax);
12071
12072 // Set pending exception and eax to out of memory exception.
12073 ExternalReference pending_exception(Top::k_pending_exception_address);
12074 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
12075 __ mov(Operand::StaticVariable(pending_exception), eax);
12076 }
12077
12078 // Clear the context pointer.
12079 __ xor_(esi, Operand(esi));
12080
12081 // Restore fp from handler and discard handler state.
12082 ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
12083 __ pop(ebp);
12084 __ pop(edx); // State.
12085
12086 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
12087 __ ret(0);
12088}
12089
12090
Leon Clarke4515c472010-02-03 11:58:03 +000012091void CEntryStub::Generate(MacroAssembler* masm) {
Steve Blocka7e24c12009-10-30 11:49:00 +000012092 // eax: number of arguments including receiver
12093 // ebx: pointer to C function (C callee-saved)
12094 // ebp: frame pointer (restored after C call)
12095 // esp: stack pointer (restored after C call)
12096 // esi: current context (C callee-saved)
12097 // edi: JS function of the caller (C callee-saved)
12098
12099 // NOTE: Invocations of builtins may return failure objects instead
12100 // of a proper result. The builtin entry handles this by performing
12101 // a garbage collection and retrying the builtin (twice).
12102
Steve Blocka7e24c12009-10-30 11:49:00 +000012103 // Enter the exit frame that transitions from JavaScript to C++.
Leon Clarke4515c472010-02-03 11:58:03 +000012104 __ EnterExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +000012105
12106 // eax: result parameter for PerformGC, if any (setup below)
12107 // ebx: pointer to builtin function (C callee-saved)
12108 // ebp: frame pointer (restored after C call)
12109 // esp: stack pointer (restored after C call)
12110 // edi: number of arguments including receiver (C callee-saved)
12111 // esi: argv pointer (C callee-saved)
12112
12113 Label throw_normal_exception;
12114 Label throw_termination_exception;
12115 Label throw_out_of_memory_exception;
12116
12117 // Call into the runtime system.
12118 GenerateCore(masm,
12119 &throw_normal_exception,
12120 &throw_termination_exception,
12121 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000012122 false,
12123 false);
12124
12125 // Do space-specific GC and retry runtime call.
12126 GenerateCore(masm,
12127 &throw_normal_exception,
12128 &throw_termination_exception,
12129 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000012130 true,
12131 false);
12132
12133 // Do full GC and retry runtime call one final time.
12134 Failure* failure = Failure::InternalError();
12135 __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
12136 GenerateCore(masm,
12137 &throw_normal_exception,
12138 &throw_termination_exception,
12139 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +000012140 true,
12141 true);
12142
12143 __ bind(&throw_out_of_memory_exception);
12144 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
12145
12146 __ bind(&throw_termination_exception);
12147 GenerateThrowUncatchable(masm, TERMINATION);
12148
12149 __ bind(&throw_normal_exception);
12150 GenerateThrowTOS(masm);
12151}
12152
12153
12154void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
12155 Label invoke, exit;
12156#ifdef ENABLE_LOGGING_AND_PROFILING
12157 Label not_outermost_js, not_outermost_js_2;
12158#endif
12159
12160 // Setup frame.
12161 __ push(ebp);
12162 __ mov(ebp, Operand(esp));
12163
12164 // Push marker in two places.
12165 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
12166 __ push(Immediate(Smi::FromInt(marker))); // context slot
12167 __ push(Immediate(Smi::FromInt(marker))); // function slot
12168 // Save callee-saved registers (C calling conventions).
12169 __ push(edi);
12170 __ push(esi);
12171 __ push(ebx);
12172
12173 // Save copies of the top frame descriptor on the stack.
12174 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
12175 __ push(Operand::StaticVariable(c_entry_fp));
12176
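  // Stack layout built by the prologue so far (a sketch; offsets from ebp):
  //   ebp +  0: saved ebp
  //   ebp -  4: context-slot marker      ebp -  8: function-slot marker
  //   ebp - 12: saved edi                ebp - 16: saved esi
  //   ebp - 20: saved ebx                ebp - 24: saved c_entry_fp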
12177#ifdef ENABLE_LOGGING_AND_PROFILING
12178 // If this is the outermost JS call, set js_entry_sp value.
12179 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
12180 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
12181 __ j(not_equal, &not_outermost_js);
12182 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
12183 __ bind(&not_outermost_js);
12184#endif
12185
12186 // Call a faked try-block that does the invoke.
12187 __ call(&invoke);
12188
12189 // Caught exception: Store result (exception) in the pending
12190 // exception field in the JSEnv and return a failure sentinel.
12191 ExternalReference pending_exception(Top::k_pending_exception_address);
12192 __ mov(Operand::StaticVariable(pending_exception), eax);
12193 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
12194 __ jmp(&exit);
12195
12196 // Invoke: Link this frame into the handler chain.
12197 __ bind(&invoke);
12198 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
12199
12200 // Clear any pending exceptions.
12201 __ mov(edx,
12202 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
12203 __ mov(Operand::StaticVariable(pending_exception), edx);
12204
12205 // Fake a receiver (NULL).
12206 __ push(Immediate(0)); // receiver
12207
12208 // Invoke the function by calling through JS entry trampoline
12209 // builtin and pop the faked function when we return. Notice that we
12210 // cannot store a reference to the trampoline code directly in this
12211 // stub, because the builtin stubs may not have been generated yet.
12212 if (is_construct) {
12213 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
12214 __ mov(edx, Immediate(construct_entry));
12215 } else {
12216 ExternalReference entry(Builtins::JSEntryTrampoline);
12217 __ mov(edx, Immediate(entry));
12218 }
12219 __ mov(edx, Operand(edx, 0)); // deref address
12220 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
12221 __ call(Operand(edx));
12222
12223 // Unlink this frame from the handler chain.
12224 __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
12225 // Pop next_sp.
12226 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
12227
12228#ifdef ENABLE_LOGGING_AND_PROFILING
12229 // If current EBP value is the same as js_entry_sp value, it means that
12230 // the current function is the outermost.
12231 __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
12232 __ j(not_equal, &not_outermost_js_2);
12233 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
12234 __ bind(&not_outermost_js_2);
12235#endif
12236
12237 // Restore the top frame descriptor from the stack.
12238 __ bind(&exit);
12239 __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));
12240
12241 // Restore callee-saved registers (C calling conventions).
12242 __ pop(ebx);
12243 __ pop(esi);
12244 __ pop(edi);
12245 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
12246
12247 // Restore frame pointer and return.
12248 __ pop(ebp);
12249 __ ret(0);
12250}
12251
12252
12253void InstanceofStub::Generate(MacroAssembler* masm) {
12254 // Get the object - go slow case if it's a smi.
12255 Label slow;
12256 __ mov(eax, Operand(esp, 2 * kPointerSize)); // 2 ~ return address, function
12257 __ test(eax, Immediate(kSmiTagMask));
12258 __ j(zero, &slow, not_taken);
12259
12260 // Check that the left hand is a JS object.
12261 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); // eax - object map
12262 __ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset)); // ecx - type
12263 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
12264 __ j(less, &slow, not_taken);
12265 __ cmp(ecx, LAST_JS_OBJECT_TYPE);
12266 __ j(greater, &slow, not_taken);
12267
12268 // Get the prototype of the function.
12269 __ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address
Kristian Monsen25f61362010-05-21 11:50:48 +010012270 // edx is function, eax is map.
12271
12272 // Look up the function and the map in the instanceof cache.
12273 Label miss;
12274 ExternalReference roots_address = ExternalReference::roots_address();
12275 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
12276 __ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address));
12277 __ j(not_equal, &miss);
12278 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
12279 __ cmp(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
12280 __ j(not_equal, &miss);
12281 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
12282 __ mov(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
12283 __ ret(2 * kPointerSize);
12284
12285 __ bind(&miss);
Steve Blocka7e24c12009-10-30 11:49:00 +000012286 __ TryGetFunctionPrototype(edx, ebx, ecx, &slow);
12287
12288 // Check that the function prototype is a JS object.
12289 __ test(ebx, Immediate(kSmiTagMask));
12290 __ j(zero, &slow, not_taken);
12291 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
12292 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
12293 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
12294 __ j(less, &slow, not_taken);
12295 __ cmp(ecx, LAST_JS_OBJECT_TYPE);
12296 __ j(greater, &slow, not_taken);
12297
Kristian Monsen25f61362010-05-21 11:50:48 +010012298 // Register mapping:
12299 // eax is object map.
12300 // edx is function.
12301 // ebx is function prototype.
12302 __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
12303 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
12304 __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
12305 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), edx);
12306
Steve Blocka7e24c12009-10-30 11:49:00 +000012307 __ mov(ecx, FieldOperand(eax, Map::kPrototypeOffset));
12308
12309 // Loop through the prototype chain looking for the function prototype.
12310 Label loop, is_instance, is_not_instance;
12311 __ bind(&loop);
12312 __ cmp(ecx, Operand(ebx));
12313 __ j(equal, &is_instance);
12314 __ cmp(Operand(ecx), Immediate(Factory::null_value()));
12315 __ j(equal, &is_not_instance);
12316 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
12317 __ mov(ecx, FieldOperand(ecx, Map::kPrototypeOffset));
12318 __ jmp(&loop);
12319
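  // The chain walk above, in C terms (a sketch):
  //   for (Object* p = map->prototype(); p != null_value;
  //        p = p->map()->prototype())
  //     if (p == function_prototype) { /* is an instance */ }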
12320 __ bind(&is_instance);
12321 __ Set(eax, Immediate(0));
Kristian Monsen25f61362010-05-21 11:50:48 +010012322 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
12323 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
Steve Blocka7e24c12009-10-30 11:49:00 +000012324 __ ret(2 * kPointerSize);
12325
12326 __ bind(&is_not_instance);
12327 __ Set(eax, Immediate(Smi::FromInt(1)));
Kristian Monsen25f61362010-05-21 11:50:48 +010012328 __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
12329 __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
Steve Blocka7e24c12009-10-30 11:49:00 +000012330 __ ret(2 * kPointerSize);
12331
12332 // Slow-case: Go through the JavaScript implementation.
12333 __ bind(&slow);
12334 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
12335}
12336
12337
Steve Block6ded16b2010-05-10 14:33:55 +010012338int CompareStub::MinorKey() {
12339 // Encode the four parameters in a unique 16-bit value. To avoid duplicate
12340 // stubs the never NaN NaN condition is only taken into account if the
12341 // condition is equals.
12342 ASSERT(static_cast<unsigned>(cc_) < (1 << 13));
12343 return ConditionField::encode(static_cast<unsigned>(cc_))
12344 | StrictField::encode(strict_)
12345 | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
12346 | IncludeNumberCompareField::encode(include_number_compare_);
Leon Clarkee46be812010-01-19 14:06:41 +000012347}


// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";

  const char* cc_name;
  switch (cc_) {
    case less: cc_name = "LT"; break;
    case greater: cc_name = "GT"; break;
    case less_equal: cc_name = "LE"; break;
    case greater_equal: cc_name = "GE"; break;
    case equal: cc_name = "EQ"; break;
    case not_equal: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  }

  const char* strict_name = "";
  if (strict_ && (cc_ == equal || cc_ == not_equal)) {
    strict_name = "_STRICT";
  }

  const char* never_nan_nan_name = "";
  if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
    never_nan_nan_name = "_NO_NAN";
  }

  const char* include_number_compare_name = "";
  if (!include_number_compare_) {
    include_number_compare_name = "_NO_NUMBER";
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "CompareStub_%s%s%s%s",
               cc_name,
               strict_name,
               never_nan_nan_name,
               include_number_compare_name);
  return name_;
}


void StringHelper::GenerateFastCharCodeAt(MacroAssembler* masm,
                                          Register object,
                                          Register index,
                                          Register scratch,
                                          Register result,
                                          Label* receiver_not_string,
                                          Label* index_not_smi,
                                          Label* index_out_of_range,
                                          Label* slow_case) {
  Label not_a_flat_string;
  Label try_again_with_new_string;
  Label ascii_string;
  Label got_char_code;

  // If the receiver is a smi trigger the non-string case.
  ASSERT(kSmiTag == 0);
  __ test(object, Immediate(kSmiTagMask));
  __ j(zero, receiver_not_string);

  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
  // If the receiver is not a string trigger the non-string case.
  __ test(result, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string);

  // If the index is non-smi trigger the non-smi case.
  ASSERT(kSmiTag == 0);
  __ test(index, Immediate(kSmiTagMask));
  __ j(not_zero, index_not_smi);

  // Check for index out of range.
  __ cmp(index, FieldOperand(object, String::kLengthOffset));
  __ j(above_equal, index_out_of_range);

  __ bind(&try_again_with_new_string);
  // ----------- S t a t e -------------
  //  -- object : string to access
  //  -- result : instance type of the string
  //  -- index  : non-negative smi index < length
  // -----------------------------------

  // We need special handling for non-flat strings.
  ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(not_zero, &not_a_flat_string);

  // Check for 1-byte or 2-byte string.
  ASSERT(kAsciiStringTag != 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register. The smi index
  // is two times the character index, which is exactly the byte offset
  // needed for a two-byte string.
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ movzx_w(result, FieldOperand(object,
                                  index, times_1,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&got_char_code);

  // Handle non-flat strings.
  __ bind(&not_a_flat_string);
  __ and_(result, kStringRepresentationMask);
  __ cmp(result, kConsStringTag);
  __ j(not_equal, slow_case);

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ mov(result, FieldOperand(object, ConsString::kSecondOffset));
  __ cmp(Operand(result), Factory::empty_string());
  __ j(not_equal, slow_case);
  // Get the first of the two strings and load its instance type.
  __ mov(object, FieldOperand(object, ConsString::kFirstOffset));
  __ mov(result, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
  __ jmp(&try_again_with_new_string);

  // ASCII string.
  __ bind(&ascii_string);
  // Put untagged index into scratch register.
  __ mov(scratch, index);
  __ SmiUntag(scratch);

  // Load the byte into the result register.
  __ movzx_b(result, FieldOperand(object,
                                  scratch, times_1,
                                  SeqAsciiString::kHeaderSize));
  __ bind(&got_char_code);
  __ SmiTag(result);
}


void StringHelper::GenerateCharFromCode(MacroAssembler* masm,
                                        Register code,
                                        Register result,
                                        InvokeFlag flag) {
  ASSERT(!code.is(result));

  Label slow_case;
  Label exit;

  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  ASSERT(kSmiTag == 0);
  ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
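  // A single test covers both requirements: the tag bit must be clear (the
  // value is a smi) and the untagged char code must not exceed
  // String::kMaxAsciiCharCode.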
  __ test(code,
          Immediate(kSmiTagMask |
                    ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
  __ j(not_zero, &slow_case, not_taken);

  __ Set(result, Immediate(Factory::single_character_string_cache()));
  ASSERT(kSmiTag == 0);
  ASSERT(kSmiTagSize == 1);
  ASSERT(kSmiShiftSize == 0);
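  // The code register holds a smi, i.e. the char code times two, so scaling
  // it by half a pointer size indexes the cache as a FixedArray of pointers.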
  // At this point code register contains smi tagged ascii char code.
  __ mov(result, FieldOperand(result,
                              code, times_half_pointer_size,
                              FixedArray::kHeaderSize));
  __ cmp(result, Factory::undefined_value());
  __ j(equal, &slow_case, not_taken);
  __ jmp(&exit);

  __ bind(&slow_case);
  if (flag == CALL_FUNCTION) {
    __ push(code);
    __ CallRuntime(Runtime::kCharFromCode, 1);
    if (!result.is(eax)) {
      __ mov(result, eax);
    }
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    ASSERT(result.is(eax));
    __ pop(eax);  // Save return address.
    __ push(code);
    __ push(eax);  // Restore return address.
    __ TailCallRuntime(Runtime::kCharFromCode, 1, 1);
  }

  __ bind(&exit);
  if (flag == JUMP_FUNCTION) {
    ASSERT(result.is(eax));
    __ ret(0);
  }
}


void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime;

  // Load the two arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (string_check_) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);

    // First argument is a string, test second.
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);
  }

  // Both arguments are strings.
  // eax: first string
  // edx: second string
  // Check if either of the strings is empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
  ASSERT(kSmiTag == 0);
  __ test(ecx, Operand(ecx));
  __ j(not_zero, &second_not_zero_length);
  // Second string is empty, result is first string which is already in eax.
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  ASSERT(kSmiTag == 0);
  __ test(ebx, Operand(ebx));
  __ j(not_zero, &both_not_zero_length);
  // First string is empty, result is second string which is in edx.
  __ mov(eax, edx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // eax: first string
  // ebx: length of first string as a smi
  // ecx: length of second string as a smi
  // edx: second string
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);
  __ add(ebx, Operand(ecx));
  ASSERT(Smi::kMaxValue == String::kMaxLength);
  // Handle exceptionally long strings in the runtime system.
  __ j(overflow, &string_add_runtime);
  // Use the runtime system when adding two one character strings, as it
  // contains optimizations for this specific case using the symbol table.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
                                         &string_add_runtime);

  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));

  // Try to look up the two character string in the symbol table. If it is
  // not found just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ Set(ebx, Immediate(Smi::FromInt(2)));
  __ jmp(&make_flat_ascii_string);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
  __ j(below, &string_add_flat_result);

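  // The combined length is at least kMinNonFlatLength, so the result is
  // built as a ConsString: concatenation stays O(1) and the characters are
  // only copied if the string has to be flattened later.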
  // If the result is not supposed to be flat, allocate a cons string object.
  // If both strings are ascii the result is an ascii cons string.
  Label non_ascii, allocated;
  __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ and_(ecx, Operand(edi));
  ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
  __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
  __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
         Immediate(String::kEmptyHashField));
  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
  __ mov(eax, ecx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // Allocate a two byte cons string.
  __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&string_add_flat_result);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  // Now check whether both strings are ascii strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  Label non_ascii_string_add_flat_result;
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &string_add_runtime);

  __ bind(&make_flat_ascii_string);
  // Both strings are ascii strings. As they are short they are both flat.
  // ebx: length of resulting flat string as a smi
  __ SmiUntag(ebx);
  __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // eax: first string - known to be two byte
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kAsciiStringTag);
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ SmiUntag(ebx);
  __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  // (SeqAsciiString and SeqTwoByteString have the same header size.)
  __ add(Operand(edx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
}


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for
  // very short strings.
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(1));
    __ add(Operand(dest), Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(2));
    __ add(Operand(dest), Immediate(2));
  }
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);
}


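// Note: rep movs has a fixed setup cost, so the variant below only pays off
// for copies that may be long; the byte-at-a-time loop above is meant for
// strings known to be short.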
void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords. Copy remaining characters
  // in a byte loop after running rep movs.
  ASSERT(dest.is(edi));  // rep movs destination
  ASSERT(src.is(esi));  // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are fewer than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes);

  // Copy from esi to edi using the rep movs instruction.
  __ mov(scratch, count);
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(Operand(src), Immediate(1));
  __ add(Operand(dest), Immediate(1));
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}


void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol
  // table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index);
  __ mov(scratch, c2);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, kBitsPerByte);
  __ or_(chars, Operand(c2));

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
  __ mov(symbol_table,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(Operand(mask), Immediate(1));
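  // The table capacity is a power of two, so capacity - 1 works as a bit
  // mask that reduces a hash (plus probe offset) to a valid entry index.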

  // Registers
  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash: hash of two character string
  // symbol_table: symbol table
  // mask: capacity mask
  // scratch: -

  // Perform a number of probes in the symbol table.
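  // If none of the probes hits, give up and let the caller allocate a fresh
  // string: missing the symbol table here only costs sharing, not
  // correctness.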
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ mov(scratch, hash);
    if (i > 0) {
      __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(mask));

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    ASSERT_EQ(1, SymbolTable::kEntrySize);
    __ mov(candidate,
           FieldOperand(symbol_table,
                        scratch,
                        times_pointer_size,
                        SymbolTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    __ cmp(candidate, Factory::undefined_value());
    __ j(equal, not_found);

    // If length is not 2 the string is not a candidate.
    __ cmp(FieldOperand(candidate, String::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &next_probe[i]);

    // As we are out of registers save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ascii string.
    __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ and_(temp, 0x0000ffff);
    __ cmp(chars, Operand(temp));
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  __ pop(mask);  // Pop temporarily saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}


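// The three hash helpers below must compute the same value as the hash used
// when the runtime adds symbols to the symbol table, otherwise the probe
// above cannot find them; the steps are those of Jenkins' one-at-a-time
// hash.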
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ mov(hash, character);
  __ shl(hash, 10);
  __ add(hash, Operand(character));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, Operand(character));
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ sar(scratch, 11);
  __ xor_(hash, Operand(scratch));
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, Operand(scratch));

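  // A zero hash is reserved, so the final hash must be nonzero; the
  // substituted constant matches the one used by the runtime hash.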
  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ test(hash, Operand(hash));
  __ j(not_zero, &hash_not_zero);
  __ mov(hash, Immediate(27));
  __ bind(&hash_not_zero);
}


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: to
  //  esp[8]: from
  //  esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  ASSERT_EQ(0, kSmiTag);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type
  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ sub(ecx, Operand(edx));
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked up in the symbol table.
  __ SmiUntag(ecx);  // Result length is no longer smi.
  __ cmp(ecx, 2);
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // eax: string
  // ebx: instance type
  // ecx: sub string length (value is 2)
  // edx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiUntag(edx);  // From index is no longer smi.
  __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx,
             FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to look up the two character string in the symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Set up registers for allocating the two character string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ Set(ecx, Immediate(2));

  __ bind(&result_longer_than_two);
  // eax: string
  // ebx: instance type
  // ecx: result string length
  // Check for flat ascii string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
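  // (GenerateCopyCharactersREP requires src, dest and count in esi, edi and
  // ecx, so the context register esi has to be saved around the copy and
  // restored afterwards.)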
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  __ SmiUntag(ebx);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  __ bind(&non_ascii_flat);
  // eax: string
  // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
  // ecx: result string length
  // Check for flat two byte string.
  __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  // Since from is a smi it is already multiplied by two, which matches the
  // size of a two byte character.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Label result_not_equal;
  Label result_greater;
  Label compare_lengths;

  __ IncrementCounter(&Counters::string_compare_native, 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  // This branch uses the flags set by the subtraction above.
  __ j(less_equal, &left_shorter);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, Operand(length_delta));
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  __ test(min_length, Operand(min_length));
  __ j(zero, &compare_lengths);

  // Change index to run from -min_length to -1 by adding min_length to the
  // string start. This means that the loop ends when the index reaches zero,
  // so no additional compare is needed.
  __ SmiUntag(min_length);
  __ lea(left,
         FieldOperand(left,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ neg(min_length);

  Register index = min_length;  // index = -min_length;

  {
    // Compare loop.
    Label loop;
    __ bind(&loop);
    // Compare characters.
    __ mov_b(scratch2, Operand(left, index, times_1, 0));
    __ cmpb(scratch2, Operand(right, index, times_1, 0));
    __ j(not_equal, &result_not_equal);
    __ add(Operand(index), Immediate(1));
    __ j(not_zero, &loop);
  }

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, Operand(length_delta));
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  ASSERT_EQ(0, EQUAL);
  ASSERT_EQ(0, kSmiTag);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(2 * kPointerSize);

  __ bind(&result_not_equal);
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(2 * kPointerSize);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(2 * kPointerSize);
}


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: right string
  //  esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

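  // If left and right are the same object they are trivially equal, so the
  // characters never need to be inspected.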
  Label not_same;
  __ cmp(edx, Operand(eax));
  __ j(not_equal, &not_same);
  ASSERT_EQ(0, EQUAL);
  ASSERT_EQ(0, kSmiTag);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ascii strings.
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}

#undef __

} }  // namespace v8::internal