// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "virtual-frame-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.
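//
// As the two functions below show, each registers_[i] action is one of:
// kPush (the value lives only in the register, so push/pop it on the
// machine stack), kIgnore (the register may be clobbered), or an
// ebp-relative frame offset, where the kSyncedFlag bit marks a frame
// slot that already holds the value and needs no store on entry.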

void DeferredCode::SaveRegisters() {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void DeferredCode::RestoreRegisters() {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


// -------------------------------------------------------------------------
// CodeGenerator implementation

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      in_safe_int32_mode_(false),
      safe_int32_mode_enabled_(true),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->loop_nesting();

  JumpTarget::set_compiling_deferred_code(false);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    frame_->SpillAll();
    __ int3();
  }
#endif

  // New scope to get automatic timing calculation.
  { HistogramTimerScope codegen_timer(&Counters::code_generation);
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

    if (info->mode() == CompilationInfo::PRIMARY) {
      frame_->Enter();

      // Allocate space for locals and initialize them.
      frame_->AllocateStackSlots();

      // Allocate the local context if needed.
      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
      if (heap_slots > 0) {
        Comment cmnt(masm_, "[ allocate local context");
        // Allocate local context.
        // Get outer context and create a new context based on it.
        frame_->PushFunction();
        Result context;
        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
          FastNewContextStub stub(heap_slots);
          context = frame_->CallStub(&stub, 1);
        } else {
          context = frame_->CallRuntime(Runtime::kNewContext, 1);
        }

        // Update context local.
        frame_->SaveContextRegister();

        // Verify that the runtime call result and esi agree.
        if (FLAG_debug_code) {
          __ cmp(context.reg(), Operand(esi));
          __ Assert(equal, "Runtime::NewContext should end up in esi");
        }
      }

      // TODO(1241774): Improve this code:
      // 1) only needed if we have a context
      // 2) no need to recompute context ptr every single time
      // 3) don't copy parameter operand code from SlotOperand!
      {
        Comment cmnt2(masm_, "[ copy context parameters into .context");
        // Note that iteration order is relevant here! If we have the same
        // parameter twice (e.g., function (x, y, x)), and that parameter
        // needs to be copied into the context, it must be the last argument
        // passed to the parameter that needs to be copied. This is a rare
        // case so we don't check for it, instead we rely on the copying
        // order: such a parameter is copied repeatedly into the same
        // context location and thus the last value is what is seen inside
        // the function.
        for (int i = 0; i < scope()->num_parameters(); i++) {
          Variable* par = scope()->parameter(i);
          Slot* slot = par->slot();
          if (slot != NULL && slot->type() == Slot::CONTEXT) {
            // The use of SlotOperand below is safe in unspilled code
            // because the slot is guaranteed to be a context slot.
            //
            // There are no parameters in the global scope.
            ASSERT(!scope()->is_global_scope());
            frame_->PushParameterAt(i);
            Result value = frame_->Pop();
            value.ToRegister();

            // SlotOperand loads context.reg() with the context object
            // stored to, used below in RecordWrite.
            Result context = allocator_->Allocate();
            ASSERT(context.is_valid());
            __ mov(SlotOperand(slot, context.reg()), value.reg());
            int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
            Result scratch = allocator_->Allocate();
            ASSERT(scratch.is_valid());
            frame_->Spill(context.reg());
            frame_->Spill(value.reg());
            __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
          }
        }
      }

      // Store the arguments object. This must happen after context
      // initialization because the arguments object may be stored in
      // the context.
      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
        StoreArgumentsObject(true);
      }

      // Initialize ThisFunction reference if present.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        frame_->Push(Factory::the_hole_value());
        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
      }
    } else {
      // When used as the secondary compiler for splitting, ebp, esi,
      // and edi have been pushed on the stack. Adjust the virtual
      // frame to match this state.
      frame_->Adjust(3);
      allocator_->Unuse(edi);

      // Bind all the bailout labels to the beginning of the function.
      List<CompilationInfo::Bailout*>* bailouts = info->bailouts();
      for (int i = 0; i < bailouts->length(); i++) {
        __ bind(bailouts->at(i)->label());
      }
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body. In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence. This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  ASSERT_EQ(info->loop_nesting(), loop_nesting_);
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // do not overwrite context register
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we
      // are always at a function context. However it is safe to dereference
      // because the function context of a function context is itself.
      // Before deleting this mov we should try to create a counter-example
      // first, though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}


Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame. If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
void CodeGenerator::LoadCondition(Expression* expr,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(expr);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::LoadInSafeInt32Mode(Expression* expr,
                                        BreakTarget* unsafe_bailout) {
  set_unsafe_bailout(unsafe_bailout);
  set_in_safe_int32_mode(true);
  Load(expr);
  Result value = frame_->Pop();
  ASSERT(frame_->HasNoUntaggedInt32Elements());
  if (expr->GuaranteedSmiResult()) {
    ConvertInt32ResultToSmi(&value);
  } else {
    ConvertInt32ResultToNumber(&value);
  }
  set_in_safe_int32_mode(false);
  set_unsafe_bailout(NULL);
  frame_->Push(&value);
}


void CodeGenerator::LoadWithSafeInt32ModeDisabled(Expression* expr) {
  set_safe_int32_mode_enabled(false);
  Load(expr);
  set_safe_int32_mode_enabled(true);
}


void CodeGenerator::ConvertInt32ResultToSmi(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    __ add(value->reg(), Operand(value->reg()));
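    // Doubling the value shifts it left by one, which is exactly ia32 smi
    // tagging (tag bit 0): e.g., 5 (0b101) becomes the smi 10 (0b1010).
    // This path is only taken for guaranteed smi results, so the add
    // cannot overflow.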
  } else {
    ASSERT(value->is_constant());
    ASSERT(value->handle()->IsSmi());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Smi());
}


void CodeGenerator::ConvertInt32ResultToNumber(Result* value) {
  ASSERT(value->is_untagged_int32());
  if (value->is_register()) {
    Register val = value->reg();
    JumpTarget done;
    __ add(val, Operand(val));
    done.Branch(no_overflow, value);
    __ sar(val, 1);
    // If there was an overflow, bits 30 and 31 of the original number
    // disagree.
    __ xor_(val, 0x80000000u);
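    // The sar/xor pair recovers the original value: e.g., for
    // val == 0x40000000 the add leaves 0x80000000 (with overflow),
    // sar gives 0xC0000000, and the xor restores 0x40000000.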
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ cvtsi2sd(xmm0, Operand(val));
    } else {
      // Move val to ST[0] in the FPU.
      // Push and pop are safe with respect to the virtual frame because
      // all synced elements are below the actual stack pointer.
      __ push(val);
      __ fild_s(Operand(esp, 0));
      __ pop(val);
    }
    Result scratch = allocator_->Allocate();
    ASSERT(scratch.is_register());
    Label allocation_failed;
    __ AllocateHeapNumber(val, scratch.reg(),
                          no_reg, &allocation_failed);
    VirtualFrame* clone = new VirtualFrame(frame_);
    scratch.Unuse();
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(val, HeapNumber::kValueOffset));
    }
    done.Jump(value);

    // Establish the virtual frame, cloned from where AllocateHeapNumber
    // jumped to allocation_failed.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&allocation_failed);
    unsafe_bailout_->Jump();

    done.Bind(value);
  } else {
    ASSERT(value->is_constant());
  }
  value->set_untagged_int32(false);
  value->set_type_info(TypeInfo::Integer32());
}


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());

  // If the expression should be a side-effect-free 32-bit int computation,
  // compile that SafeInt32 path, and a bailout path.
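  // (A qualifying expression is bit-op heavy, e.g. something like
  // (a & b) | (c ^ d), which contains more than two bit operations.)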
  if (!in_safe_int32_mode() &&
      safe_int32_mode_enabled() &&
      expr->side_effect_free() &&
      expr->num_bit_ops() > 2 &&
      CpuFeatures::IsSupported(SSE2)) {
    BreakTarget unsafe_bailout;
    JumpTarget done;
    unsafe_bailout.set_expected_height(frame_->height());
    LoadInSafeInt32Mode(expr, &unsafe_bailout);
    done.Jump();

    if (unsafe_bailout.is_linked()) {
      unsafe_bailout.Bind();
      LoadWithSafeInt32ModeDisabled(expr);
    }
    done.Bind();
  } else {
    JumpTarget true_target;
    JumpTarget false_target;

    ControlDestination dest(&true_target, &false_target, true);
    LoadCondition(expr, &dest, false);

    if (dest.false_was_fall_through()) {
      // The false target was just bound.
      JumpTarget loaded;
      frame_->Push(Factory::false_value());
      // There may be dangling jumps to the true target.
      if (true_target.is_linked()) {
        loaded.Jump();
        true_target.Bind();
        frame_->Push(Factory::true_value());
        loaded.Bind();
      }

    } else if (dest.is_used()) {
      // There is true, and possibly false, control flow (with true as
      // the fall through).
      JumpTarget loaded;
      frame_->Push(Factory::true_value());
      if (false_target.is_linked()) {
        loaded.Jump();
        false_target.Bind();
        frame_->Push(Factory::false_value());
        loaded.Bind();
      }

    } else {
      // We have a valid value on top of the frame, but we still may
      // have dangling jumps to the true and false targets from nested
      // subexpressions (eg, the left subexpressions of the
      // short-circuited boolean operators).
      ASSERT(has_valid_frame());
      if (true_target.is_linked() || false_target.is_linked()) {
        JumpTarget loaded;
        loaded.Jump();  // Don't lose the current TOS.
        if (true_target.is_linked()) {
          true_target.Bind();
          frame_->Push(Factory::true_value());
          if (false_target.is_linked()) {
            loaded.Jump();
          }
        }
        if (false_target.is_linked()) {
          false_target.Bind();
          frame_->Push(Factory::false_value());
        }
        loaded.Bind();
      }
    }
  }
  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObject());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObject());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObject());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
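    // (E.g., 'typeof someUndeclaredGlobal' must evaluate to "undefined"
    // rather than throwing a ReferenceError.)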
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has already
    // been written to. This can happen if a function has a local
    // variable named 'arguments'.
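    // (E.g., in 'function f() { var arguments = 0; }' the user's value,
    // not the lazily allocated arguments object, must be visible.)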
    LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->slot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
  return frame_->Pop();
}

// -------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code. Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      // If eax is free, the register allocator prefers it. Thus the code
      // generator will load the global object into eax, which is where
      // LoadIC wants it. Most uses of Reference call LoadIC directly
      // after the reference is created.
      frame_->Spill(eax);
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();

  if (value.is_integer32()) {  // Also takes Smi case.
    Comment cmnt(masm_, "ONLY_INTEGER_32");
    if (FLAG_debug_code) {
      Label ok;
      __ AbortIfNotNumber(value.reg());
      __ test(value.reg(), Immediate(kSmiTagMask));
      __ j(zero, &ok);
      __ fldz();
      __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
      __ FCmp();
      __ j(not_zero, &ok);
      __ Abort("Smi was wrapped in HeapNumber in output from bitop");
      __ bind(&ok);
    }
    // In the integer32 case there are no Smis hidden in heap numbers, so we
    // need only test for Smi zero.
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    value.Unuse();
    dest->Split(not_zero);
  } else if (value.is_number()) {
    Comment cmnt(masm_, "ONLY_NUMBER");
    // Fast case if TypeInfo indicates only numbers.
    if (FLAG_debug_code) {
      __ AbortIfNotNumber(value.reg());
    }
    // Smi => false iff zero.
    ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);
    __ fldz();
    __ fld_d(FieldOperand(value.reg(), HeapNumber::kValueOffset));
    __ FCmp();
    value.Unuse();
    dest->Split(not_zero);
  } else {
    // Fast case checks.
    // 'false' => false.
    __ cmp(value.reg(), Factory::false_value());
    dest->false_target()->Branch(equal);

    // 'true' => true.
    __ cmp(value.reg(), Factory::true_value());
    dest->true_target()->Branch(equal);

    // 'undefined' => false.
    __ cmp(value.reg(), Factory::undefined_value());
    dest->false_target()->Branch(equal);

    // Smi => false iff zero.
    ASSERT(kSmiTag == 0);
    __ test(value.reg(), Operand(value.reg()));
    dest->false_target()->Branch(zero);
    __ test(value.reg(), Immediate(kSmiTagMask));
    dest->true_target()->Branch(zero);

    // Call the stub for all other cases.
    frame_->Push(&value);  // Undo the Pop() from above.
    ToBooleanStub stub;
    Result temp = frame_->CallStub(&stub, 1);
    // Convert the result to a condition code.
    __ test(temp.reg(), Operand(temp.reg()));
    temp.Unuse();
    dest->Split(not_equal);
  }
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             TypeInfo type_info,
                             bool use_sse3,
                             Label* operand_conversion_failure);
  static void LoadNumbersAsIntegers(MacroAssembler* masm,
                                    TypeInfo type_info,
                                    bool use_sse3,
                                    Label* operand_conversion_failure);
  static void LoadUnknownsAsIntegers(MacroAssembler* masm,
                                     bool use_sse3,
                                     Label* operand_conversion_failure);

  // Test if operands are smis or heap numbers and load them
  // into xmm0 and xmm1 if they are. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);
};


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s_%s_%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "",
               static_operands_type_.ToString(),
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}


// Call the specialized stub for a binary operation.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                TypeInfo left_info,
                                TypeInfo right_info,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right),
        left_info_(left_info), right_info_(right_info), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  TypeInfo left_info_;
  TypeInfo right_info_;
  OverwriteMode mode_;
};


void DeferredInlineBinaryOperation::Generate() {
  Label done;
  if (CpuFeatures::IsSupported(SSE2) && ((op_ == Token::ADD) ||
                                         (op_ == Token::SUB) ||
                                         (op_ == Token::MUL) ||
                                         (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    if (!left_info_.IsSmi()) {
      __ test(left_, Immediate(kSmiTagMask));
      __ j(zero, &left_smi);
      if (!left_info_.IsNumber()) {
        __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_LEFT) {
        __ mov(dst_, left_);
      }
      __ jmp(&load_right);

      __ bind(&left_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(left_);
    }
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    if (!right_info_.IsSmi()) {
      __ test(right_, Immediate(kSmiTagMask));
      __ j(zero, &right_smi);
      if (!right_info_.IsNumber()) {
        __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
               Factory::heap_number_map());
        __ j(not_equal, &call_runtime);
      }
      __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
      if (mode_ == OVERWRITE_RIGHT) {
        __ mov(dst_, right_);
      } else if (mode_ == NO_OVERWRITE) {
        Label alloc_failure;
        __ push(left_);
        __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
        __ pop(left_);
      }
      __ jmp(&do_op);

      __ bind(&right_smi);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(right_);
    }
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    __ jmp(&done);

    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  GenericBinaryOpStub stub(op_,
                           mode_,
                           NO_SMI_CODE_IN_STUB,
                           TypeInfo::Combine(left_info_, right_info_));
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ bind(&done);
}


static TypeInfo CalculateTypeInfo(TypeInfo operands_type,
                                  Token::Value op,
                                  const Result& right,
                                  const Result& left) {
  // Set TypeInfo of result according to the operation performed.
  // Rely on the fact that smis have a 31 bit payload on ia32.
  ASSERT(kSmiValueSize == 31);
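  // (A 31-bit payload means smis lie in [-2^30, 2^30 - 1], so e.g. the
  // sum of any two smis always fits in a signed 32-bit integer.)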
  switch (op) {
    case Token::COMMA:
      return right.type_info();
    case Token::OR:
    case Token::AND:
      // Result type can be either of the two input types.
      return operands_type;
    case Token::BIT_AND: {
      // Anding with positive Smis will give you a Smi.
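      // (E.g., x & 0xFF is always in [0, 255], which is a valid smi.)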
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() >= 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
                 Smi::cast(*left.handle())->value() >= 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_OR: {
      // Oring with negative Smis will give you a Smi.
      if (right.is_constant() && right.handle()->IsSmi() &&
          Smi::cast(*right.handle())->value() < 0) {
        return TypeInfo::Smi();
      } else if (left.is_constant() && left.handle()->IsSmi() &&
                 Smi::cast(*left.handle())->value() < 0) {
        return TypeInfo::Smi();
      }
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    }
    case Token::BIT_XOR:
      // Result is always a 32 bit integer. Smi property of inputs is
      // preserved.
      return (operands_type.IsSmi())
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SAR:
      if (left.is_smi()) return TypeInfo::Smi();
      // Result is a smi if we shift by a constant >= 1, otherwise an
      // integer32.
      // Shift amount is masked with 0x1F (ECMA standard 11.7.2).
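      // (E.g., x >> 33 behaves exactly like x >> 1.)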
      return (right.is_constant() && right.handle()->IsSmi()
              && (Smi::cast(*right.handle())->value() & 0x1F) >= 1)
          ? TypeInfo::Smi()
          : TypeInfo::Integer32();
    case Token::SHR:
      // Result is a smi if we shift by a constant >= 2, an integer32 if
      // we shift by 1, and an unsigned 32-bit integer if we shift by 0.
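      // (E.g., (-1) >>> 0 is 4294967295, which does not fit in an int32,
      // while (-1) >>> 1 is 2147483647, an int32 but not a smi.)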
1184 if (right.is_constant() && right.handle()->IsSmi()) {
1185 int shift_amount = Smi::cast(*right.handle())->value() & 0x1F;
1186 if (shift_amount > 1) {
1187 return TypeInfo::Smi();
1188 } else if (shift_amount > 0) {
1189 return TypeInfo::Integer32();
1190 }
1191 }
1192 return TypeInfo::Number();
1193 case Token::ADD:
1194 if (operands_type.IsSmi()) {
1195 // The Integer32 range is big enough to take the sum of any two Smis.
1196 return TypeInfo::Integer32();
1197 } else if (operands_type.IsNumber()) {
1198 return TypeInfo::Number();
1199 } else if (left.type_info().IsString() || right.type_info().IsString()) {
1200 return TypeInfo::String();
1201 } else {
1202 return TypeInfo::Unknown();
1203 }
1204 case Token::SHL:
1205 return TypeInfo::Integer32();
1206 case Token::SUB:
1207 // The Integer32 range is big enough to take the difference of any two
1208 // Smis.
1209 return (operands_type.IsSmi()) ?
1210 TypeInfo::Integer32() :
1211 TypeInfo::Number();
1212 case Token::MUL:
1213 case Token::DIV:
1214 case Token::MOD:
1215 // Result is always a number.
1216 return TypeInfo::Number();
1217 default:
1218 UNREACHABLE();
1219 }
1220 UNREACHABLE();
1221 return TypeInfo::Unknown();
1222}
1223
1224
1225void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
Steve Blocka7e24c12009-10-30 11:49:00 +00001226 OverwriteMode overwrite_mode) {
1227 Comment cmnt(masm_, "[ BinaryOperation");
Steve Block6ded16b2010-05-10 14:33:55 +01001228 Token::Value op = expr->op();
Steve Blocka7e24c12009-10-30 11:49:00 +00001229 Comment cmnt_token(masm_, Token::String(op));
1230
1231 if (op == Token::COMMA) {
1232 // Simply discard left value.
1233 frame_->Nip(1);
1234 return;
1235 }
1236
Steve Blocka7e24c12009-10-30 11:49:00 +00001237 Result right = frame_->Pop();
1238 Result left = frame_->Pop();
1239
1240 if (op == Token::ADD) {
Steve Block6ded16b2010-05-10 14:33:55 +01001241 const bool left_is_string = left.type_info().IsString();
1242 const bool right_is_string = right.type_info().IsString();
1243 // Make sure constant strings have string type info.
1244 ASSERT(!(left.is_constant() && left.handle()->IsString()) ||
1245 left_is_string);
1246 ASSERT(!(right.is_constant() && right.handle()->IsString()) ||
1247 right_is_string);
Steve Blocka7e24c12009-10-30 11:49:00 +00001248 if (left_is_string || right_is_string) {
1249 frame_->Push(&left);
1250 frame_->Push(&right);
1251 Result answer;
1252 if (left_is_string) {
1253 if (right_is_string) {
Steve Block6ded16b2010-05-10 14:33:55 +01001254 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
1255 answer = frame_->CallStub(&stub, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00001256 } else {
1257 answer =
1258 frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
1259 }
1260 } else if (right_is_string) {
1261 answer =
1262 frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
1263 }
Steve Block6ded16b2010-05-10 14:33:55 +01001264 answer.set_type_info(TypeInfo::String());
Steve Blocka7e24c12009-10-30 11:49:00 +00001265 frame_->Push(&answer);
1266 return;
1267 }
1268 // Neither operand is known to be a string.
1269 }
1270
Andrei Popescu402d9372010-02-26 13:31:12 +00001271 bool left_is_smi_constant = left.is_constant() && left.handle()->IsSmi();
1272 bool left_is_non_smi_constant = left.is_constant() && !left.handle()->IsSmi();
1273 bool right_is_smi_constant = right.is_constant() && right.handle()->IsSmi();
1274 bool right_is_non_smi_constant =
1275 right.is_constant() && !right.handle()->IsSmi();
Steve Blocka7e24c12009-10-30 11:49:00 +00001276
Andrei Popescu402d9372010-02-26 13:31:12 +00001277 if (left_is_smi_constant && right_is_smi_constant) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001278 // Compute the constant result at compile time, and leave it on the frame.
1279 int left_int = Smi::cast(*left.handle())->value();
1280 int right_int = Smi::cast(*right.handle())->value();
1281 if (FoldConstantSmis(op, left_int, right_int)) return;
1282 }
1283
Andrei Popescu402d9372010-02-26 13:31:12 +00001284 // Get number type of left and right sub-expressions.
Steve Block6ded16b2010-05-10 14:33:55 +01001285 TypeInfo operands_type =
1286 TypeInfo::Combine(left.type_info(), right.type_info());
1287
1288 TypeInfo result_type = CalculateTypeInfo(operands_type, op, right, left);
Andrei Popescu402d9372010-02-26 13:31:12 +00001289
Leon Clarked91b9f72010-01-27 17:25:45 +00001290 Result answer;
Andrei Popescu402d9372010-02-26 13:31:12 +00001291 if (left_is_non_smi_constant || right_is_non_smi_constant) {
Leon Clarked91b9f72010-01-27 17:25:45 +00001292 // Go straight to the slow case, with no smi code.
Andrei Popescu402d9372010-02-26 13:31:12 +00001293 GenericBinaryOpStub stub(op,
1294 overwrite_mode,
1295 NO_SMI_CODE_IN_STUB,
1296 operands_type);
Leon Clarked91b9f72010-01-27 17:25:45 +00001297 answer = stub.GenerateCall(masm_, frame_, &left, &right);
Andrei Popescu402d9372010-02-26 13:31:12 +00001298 } else if (right_is_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01001299 answer = ConstantSmiBinaryOperation(expr, &left, right.handle(),
1300 false, overwrite_mode);
Andrei Popescu402d9372010-02-26 13:31:12 +00001301 } else if (left_is_smi_constant) {
Steve Block6ded16b2010-05-10 14:33:55 +01001302 answer = ConstantSmiBinaryOperation(expr, &right, left.handle(),
1303 true, overwrite_mode);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00001304 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00001305 // Set the flags based on the operation, type and loop nesting level.
1306 // Bit operations always assume they likely operate on Smis. Still only
1307 // generate the inline Smi check code if this operation is part of a loop.
1308 // For all other operations only inline the Smi check code for likely smis
1309 // if the operation is part of a loop.
Steve Block6ded16b2010-05-10 14:33:55 +01001310 if (loop_nesting() > 0 &&
1311 (Token::IsBitOp(op) ||
1312 operands_type.IsInteger32() ||
1313 expr->type()->IsLikelySmi())) {
1314 answer = LikelySmiBinaryOperation(expr, &left, &right, overwrite_mode);
Leon Clarked91b9f72010-01-27 17:25:45 +00001315 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00001316 GenericBinaryOpStub stub(op,
1317 overwrite_mode,
1318 NO_GENERIC_BINARY_FLAGS,
1319 operands_type);
Leon Clarked91b9f72010-01-27 17:25:45 +00001320 answer = stub.GenerateCall(masm_, frame_, &left, &right);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00001321 }
Leon Clarkeeab96aa2010-01-27 16:31:12 +00001322 }
Andrei Popescu402d9372010-02-26 13:31:12 +00001323
Steve Block6ded16b2010-05-10 14:33:55 +01001324 answer.set_type_info(result_type);
Leon Clarked91b9f72010-01-27 17:25:45 +00001325 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00001326}
1327
1328
1329bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
1330 Object* answer_object = Heap::undefined_value();
1331 switch (op) {
1332 case Token::ADD:
1333 if (Smi::IsValid(left + right)) {
1334 answer_object = Smi::FromInt(left + right);
1335 }
1336 break;
1337 case Token::SUB:
1338 if (Smi::IsValid(left - right)) {
1339 answer_object = Smi::FromInt(left - right);
1340 }
1341 break;
1342 case Token::MUL: {
1343 double answer = static_cast<double>(left) * right;
1344 if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
1345 // If the product is zero and the non-zero factor is negative,
1346 // the spec requires us to return floating point negative zero.
1347 if (answer != 0 || (left >= 0 && right >= 0)) {
1348 answer_object = Smi::FromInt(static_cast<int>(answer));
1349 }
1350 }
1351 }
1352 break;
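    // Worked example: FoldConstantSmis(Token::MUL, -3, 0) computes the
    // double product -0.0, which compares equal to 0 while left < 0, so
    // the fold is skipped and the generic path can produce the heap
    // number -0.0 as the spec requires.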
1353 case Token::DIV:
1354 case Token::MOD:
1355 break;
1356 case Token::BIT_OR:
1357 answer_object = Smi::FromInt(left | right);
1358 break;
1359 case Token::BIT_AND:
1360 answer_object = Smi::FromInt(left & right);
1361 break;
1362 case Token::BIT_XOR:
1363 answer_object = Smi::FromInt(left ^ right);
1364 break;
1365
1366 case Token::SHL: {
1367 int shift_amount = right & 0x1F;
1368 if (Smi::IsValid(left << shift_amount)) {
1369 answer_object = Smi::FromInt(left << shift_amount);
1370 }
1371 break;
1372 }
1373 case Token::SHR: {
1374 int shift_amount = right & 0x1F;
1375 unsigned int unsigned_left = left;
1376 unsigned_left >>= shift_amount;
1377 if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
1378 answer_object = Smi::FromInt(unsigned_left);
1379 }
1380 break;
1381 }
1382 case Token::SAR: {
1383 int shift_amount = right & 0x1F;
1384 unsigned int unsigned_left = left;
1385 if (left < 0) {
1386        // Perform an arithmetic shift of a negative number by
1387        // complementing it, shifting logically, and complementing again.
1388 unsigned_left = ~unsigned_left;
1389 unsigned_left >>= shift_amount;
1390 unsigned_left = ~unsigned_left;
1391 } else {
1392 unsigned_left >>= shift_amount;
1393 }
1394 ASSERT(Smi::IsValid(unsigned_left)); // Converted to signed.
1395 answer_object = Smi::FromInt(unsigned_left); // Converted to signed.
1396 break;
1397 }
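    // Worked example of the complement trick, with 32-bit values:
    // left = -8 (0xFFFFFFF8), shift_amount = 1: ~unsigned_left = 7,
    // 7 >> 1 = 3, ~3 = 0xFFFFFFFC = -4, i.e. the arithmetic -8 >> 1.
    // The detour avoids right-shifting a negative signed int, whose
    // result is implementation-defined in C++.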
1398 default:
1399 UNREACHABLE();
1400 break;
1401 }
1402 if (answer_object == Heap::undefined_value()) {
1403 return false;
1404 }
1405 frame_->Push(Handle<Object>(answer_object));
1406 return true;
1407}
1408
1409
Steve Block6ded16b2010-05-10 14:33:55 +01001410static void CheckTwoForSminess(MacroAssembler* masm,
1411 Register left, Register right, Register scratch,
1412 TypeInfo left_info, TypeInfo right_info,
1413 DeferredInlineBinaryOperation* deferred);
1414
1415
Steve Blocka7e24c12009-10-30 11:49:00 +00001416// Implements a binary operation using a deferred code object and some
1417// inline code to operate on smis quickly.
Steve Block6ded16b2010-05-10 14:33:55 +01001418Result CodeGenerator::LikelySmiBinaryOperation(BinaryOperation* expr,
Leon Clarked91b9f72010-01-27 17:25:45 +00001419 Result* left,
1420 Result* right,
1421 OverwriteMode overwrite_mode) {
Kristian Monsen25f61362010-05-21 11:50:48 +01001422 // Copy the type info because left and right may be overwritten.
1423 TypeInfo left_type_info = left->type_info();
1424 TypeInfo right_type_info = right->type_info();
Steve Block6ded16b2010-05-10 14:33:55 +01001425 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00001426 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001427 // Special handling of div and mod because they use fixed registers.
1428 if (op == Token::DIV || op == Token::MOD) {
1429 // We need eax as the quotient register, edx as the remainder
1430 // register, neither left nor right in eax or edx, and left copied
1431 // to eax.
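  // On ia32, idiv divides edx:eax by its operand and leaves the quotient
  // in eax and the remainder in edx, which is why exactly these two
  // registers are pinned here. For example, with eax = 7 (and edx = 0
  // after cdq) a divisor of 2 leaves eax = 3 and edx = 1.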
1432 Result quotient;
1433 Result remainder;
1434 bool left_is_in_eax = false;
1435 // Step 1: get eax for quotient.
1436 if ((left->is_register() && left->reg().is(eax)) ||
1437 (right->is_register() && right->reg().is(eax))) {
1438 // One or both is in eax. Use a fresh non-edx register for
1439 // them.
1440 Result fresh = allocator_->Allocate();
1441 ASSERT(fresh.is_valid());
1442 if (fresh.reg().is(edx)) {
1443 remainder = fresh;
1444 fresh = allocator_->Allocate();
1445 ASSERT(fresh.is_valid());
1446 }
1447 if (left->is_register() && left->reg().is(eax)) {
1448 quotient = *left;
1449 *left = fresh;
1450 left_is_in_eax = true;
1451 }
1452 if (right->is_register() && right->reg().is(eax)) {
1453 quotient = *right;
1454 *right = fresh;
1455 }
1456 __ mov(fresh.reg(), eax);
1457 } else {
1458 // Neither left nor right is in eax.
1459 quotient = allocator_->Allocate(eax);
1460 }
1461 ASSERT(quotient.is_register() && quotient.reg().is(eax));
1462 ASSERT(!(left->is_register() && left->reg().is(eax)));
1463 ASSERT(!(right->is_register() && right->reg().is(eax)));
1464
1465 // Step 2: get edx for remainder if necessary.
1466 if (!remainder.is_valid()) {
1467 if ((left->is_register() && left->reg().is(edx)) ||
1468 (right->is_register() && right->reg().is(edx))) {
1469 Result fresh = allocator_->Allocate();
1470 ASSERT(fresh.is_valid());
1471 if (left->is_register() && left->reg().is(edx)) {
1472 remainder = *left;
1473 *left = fresh;
1474 }
1475 if (right->is_register() && right->reg().is(edx)) {
1476 remainder = *right;
1477 *right = fresh;
1478 }
1479 __ mov(fresh.reg(), edx);
1480 } else {
1481 // Neither left nor right is in edx.
1482 remainder = allocator_->Allocate(edx);
1483 }
1484 }
1485 ASSERT(remainder.is_register() && remainder.reg().is(edx));
1486 ASSERT(!(left->is_register() && left->reg().is(edx)));
1487 ASSERT(!(right->is_register() && right->reg().is(edx)));
1488
1489 left->ToRegister();
1490 right->ToRegister();
1491 frame_->Spill(eax);
1492 frame_->Spill(edx);
1493
1494 // Check that left and right are smi tagged.
1495 DeferredInlineBinaryOperation* deferred =
1496 new DeferredInlineBinaryOperation(op,
1497 (op == Token::DIV) ? eax : edx,
1498 left->reg(),
1499 right->reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001500 left_type_info,
1501 right_type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001502 overwrite_mode);
1503 if (left->reg().is(right->reg())) {
1504 __ test(left->reg(), Immediate(kSmiTagMask));
1505 } else {
1506 // Use the quotient register as a scratch for the tag check.
1507 if (!left_is_in_eax) __ mov(eax, left->reg());
1508 left_is_in_eax = false; // About to destroy the value in eax.
1509 __ or_(eax, Operand(right->reg()));
1510 ASSERT(kSmiTag == 0); // Adjust test if not the case.
1511 __ test(eax, Immediate(kSmiTagMask));
1512 }
1513 deferred->Branch(not_zero);
1514
1515 if (!left_is_in_eax) __ mov(eax, left->reg());
1516 // Sign extend eax into edx:eax.
1517 __ cdq();
1518 // Check for 0 divisor.
1519 __ test(right->reg(), Operand(right->reg()));
1520 deferred->Branch(zero);
1521 // Divide edx:eax by the right operand.
1522 __ idiv(right->reg());
1523
1524 // Complete the operation.
1525 if (op == Token::DIV) {
1526 // Check for negative zero result. If result is zero, and divisor
1527 // is negative, return a floating point negative zero. The
1528 // virtual frame is unchanged in this block, so local control flow
Steve Block6ded16b2010-05-10 14:33:55 +01001529 // can use a Label rather than a JumpTarget. If the context of this
1530 // expression will treat -0 like 0, do not do this test.
1531 if (!expr->no_negative_zero()) {
1532 Label non_zero_result;
1533 __ test(left->reg(), Operand(left->reg()));
1534 __ j(not_zero, &non_zero_result);
1535 __ test(right->reg(), Operand(right->reg()));
1536 deferred->Branch(negative);
1537 __ bind(&non_zero_result);
1538 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001539 // Check for the corner case of dividing the most negative smi by
1540 // -1. We cannot use the overflow flag, since it is not set by
1541 // idiv instruction.
1542 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
1543 __ cmp(eax, 0x40000000);
1544 deferred->Branch(equal);
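      // Worked example: both operands are still tagged (value << 1), so
      // dividing tagged(Smi::kMinValue) = -2^31 by tagged(-1) = -2 leaves
      // the untagged quotient 2^30 = 0x40000000 in eax, one past
      // Smi::kMaxValue and hence not representable as a smi.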
1545 // Check that the remainder is zero.
1546 __ test(edx, Operand(edx));
1547 deferred->Branch(not_zero);
1548 // Tag the result and store it in the quotient register.
Leon Clarkee46be812010-01-19 14:06:41 +00001549 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00001550 deferred->BindExit();
1551 left->Unuse();
1552 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001553 answer = quotient;
Steve Blocka7e24c12009-10-30 11:49:00 +00001554 } else {
1555 ASSERT(op == Token::MOD);
1556 // Check for a negative zero result. If the result is zero, and
1557 // the dividend is negative, return a floating point negative
1558 // zero. The frame is unchanged in this block, so local control
1559 // flow can use a Label rather than a JumpTarget.
Steve Block6ded16b2010-05-10 14:33:55 +01001560 if (!expr->no_negative_zero()) {
1561 Label non_zero_result;
1562 __ test(edx, Operand(edx));
1563 __ j(not_zero, &non_zero_result, taken);
1564 __ test(left->reg(), Operand(left->reg()));
1565 deferred->Branch(negative);
1566 __ bind(&non_zero_result);
1567 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001568 deferred->BindExit();
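      // No re-tagging is needed for MOD: with tagged operands,
      // (2a) mod (2b) == 2 * (a mod b), so edx already holds a tagged
      // smi. e.g. a = 7, b = 3: 14 mod 6 = 2 = tagged(1) = tagged(7 % 3).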
1569 left->Unuse();
1570 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001571 answer = remainder;
Steve Blocka7e24c12009-10-30 11:49:00 +00001572 }
Leon Clarked91b9f72010-01-27 17:25:45 +00001573 ASSERT(answer.is_valid());
1574 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001575 }
1576
1577 // Special handling of shift operations because they use fixed
1578 // registers.
1579 if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
1580 // Move left out of ecx if necessary.
1581 if (left->is_register() && left->reg().is(ecx)) {
1582 *left = allocator_->Allocate();
1583 ASSERT(left->is_valid());
1584 __ mov(left->reg(), ecx);
1585 }
1586 right->ToRegister(ecx);
1587 left->ToRegister();
1588 ASSERT(left->is_register() && !left->reg().is(ecx));
1589 ASSERT(right->is_register() && right->reg().is(ecx));
1590
1591 // We will modify right, it must be spilled.
1592 frame_->Spill(ecx);
1593
1594 // Use a fresh answer register to avoid spilling the left operand.
Leon Clarked91b9f72010-01-27 17:25:45 +00001595 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00001596 ASSERT(answer.is_valid());
1597 // Check that both operands are smis using the answer register as a
1598 // temporary.
1599 DeferredInlineBinaryOperation* deferred =
1600 new DeferredInlineBinaryOperation(op,
1601 answer.reg(),
1602 left->reg(),
1603 ecx,
Kristian Monsen25f61362010-05-21 11:50:48 +01001604 left_type_info,
1605 right_type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001606 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001607
Steve Block6ded16b2010-05-10 14:33:55 +01001608 Label do_op, left_nonsmi;
1609    // If right is known to be a smi, take a fast path when left is either
1610    // a smi or a heap number.
Kristian Monsen25f61362010-05-21 11:50:48 +01001611 if (CpuFeatures::IsSupported(SSE2) && right_type_info.IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01001612 CpuFeatures::Scope use_sse2(SSE2);
1613 __ mov(answer.reg(), left->reg());
1614 // Fast case - both are actually smis.
Kristian Monsen25f61362010-05-21 11:50:48 +01001615 if (!left_type_info.IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01001616 __ test(answer.reg(), Immediate(kSmiTagMask));
1617 __ j(not_zero, &left_nonsmi);
1618 } else {
1619 if (FLAG_debug_code) __ AbortIfNotSmi(left->reg());
1620 }
1621 if (FLAG_debug_code) __ AbortIfNotSmi(right->reg());
1622 __ SmiUntag(answer.reg());
1623 __ jmp(&do_op);
1624
1625 __ bind(&left_nonsmi);
1626 // Branch if not a heapnumber.
1627 __ cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset),
1628 Factory::heap_number_map());
1629 deferred->Branch(not_equal);
1630
1631 // Load integer value into answer register using truncation.
1632 __ cvttsd2si(answer.reg(),
1633 FieldOperand(answer.reg(), HeapNumber::kValueOffset));
1634 // Branch if we do not fit in a smi.
1635 __ cmp(answer.reg(), 0xc0000000);
1636 deferred->Branch(negative);
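      // The single compare catches both ends of the smi range: the sign
      // flag of answer - 0xc0000000 is set exactly when answer lies in
      // [2^30, 2^31 - 1] or [-2^31, -2^30 - 1], i.e. outside smi range.
      // This also covers 0x80000000, which cvttsd2si produces for
      // doubles outside int32 range.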
1637 } else {
1638 CheckTwoForSminess(masm_, left->reg(), right->reg(), answer.reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001639 left_type_info, right_type_info, deferred);
Steve Block6ded16b2010-05-10 14:33:55 +01001640
1641 // Untag both operands.
1642 __ mov(answer.reg(), left->reg());
1643 __ SmiUntag(answer.reg());
1644 }
1645
1646 __ bind(&do_op);
Leon Clarkee46be812010-01-19 14:06:41 +00001647 __ SmiUntag(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001648 // Perform the operation.
1649 switch (op) {
1650 case Token::SAR:
Steve Blockd0582a62009-12-15 09:54:21 +00001651 __ sar_cl(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001652 // No checks of result necessary
1653 break;
1654 case Token::SHR: {
1655 Label result_ok;
Steve Blockd0582a62009-12-15 09:54:21 +00001656 __ shr_cl(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001657 // Check that the *unsigned* result fits in a smi. Neither of
1658 // the two high-order bits can be set:
1659 // * 0x80000000: high bit would be lost when smi tagging.
1660 // * 0x40000000: this number would convert to negative when smi
1661 // tagging.
1662 // These two cases can only happen with shifts by 0 or 1 when
1663 // handed a valid smi. If the answer cannot be represented by a
1664 // smi, restore the left and right arguments, and jump to slow
1665 // case. The low bit of the left argument may be lost, but only
1666 // in a case where it is dropped anyway.
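        // e.g. untagged left = -1 (0xFFFFFFFF): a shift by 1 yields
        // 0x7FFFFFFF, which has bit 0x40000000 set and would turn
        // negative when tagged; a shift by 0 keeps the top bit, which
        // tagging would lose. Shifts by 2 or more clear both bits.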
1667 __ test(answer.reg(), Immediate(0xc0000000));
1668 __ j(zero, &result_ok);
Leon Clarkee46be812010-01-19 14:06:41 +00001669 __ SmiTag(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001670 deferred->Jump();
1671 __ bind(&result_ok);
1672 break;
1673 }
1674 case Token::SHL: {
1675 Label result_ok;
Steve Blockd0582a62009-12-15 09:54:21 +00001676 __ shl_cl(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001677 // Check that the *signed* result fits in a smi.
1678 __ cmp(answer.reg(), 0xc0000000);
1679 __ j(positive, &result_ok);
Leon Clarkee46be812010-01-19 14:06:41 +00001680 __ SmiTag(ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00001681 deferred->Jump();
1682 __ bind(&result_ok);
1683 break;
1684 }
1685 default:
1686 UNREACHABLE();
1687 }
1688 // Smi-tag the result in answer.
Leon Clarkee46be812010-01-19 14:06:41 +00001689 __ SmiTag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001690 deferred->BindExit();
1691 left->Unuse();
1692 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001693 ASSERT(answer.is_valid());
1694 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001695 }
1696
1697 // Handle the other binary operations.
1698 left->ToRegister();
1699 right->ToRegister();
1700 // A newly allocated register answer is used to hold the answer. The
1701 // registers containing left and right are not modified so they don't
1702 // need to be spilled in the fast case.
Leon Clarked91b9f72010-01-27 17:25:45 +00001703 answer = allocator_->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00001704 ASSERT(answer.is_valid());
1705
1706 // Perform the smi tag check.
1707 DeferredInlineBinaryOperation* deferred =
1708 new DeferredInlineBinaryOperation(op,
1709 answer.reg(),
1710 left->reg(),
1711 right->reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001712 left_type_info,
1713 right_type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001714 overwrite_mode);
Steve Block6ded16b2010-05-10 14:33:55 +01001715 CheckTwoForSminess(masm_, left->reg(), right->reg(), answer.reg(),
Kristian Monsen25f61362010-05-21 11:50:48 +01001716 left_type_info, right_type_info, deferred);
Steve Block6ded16b2010-05-10 14:33:55 +01001717
Steve Blocka7e24c12009-10-30 11:49:00 +00001718 __ mov(answer.reg(), left->reg());
1719 switch (op) {
1720 case Token::ADD:
Leon Clarked91b9f72010-01-27 17:25:45 +00001721 __ add(answer.reg(), Operand(right->reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001722 deferred->Branch(overflow);
1723 break;
1724
1725 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00001726 __ sub(answer.reg(), Operand(right->reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001727 deferred->Branch(overflow);
1728 break;
1729
1730 case Token::MUL: {
1731 // If the smi tag is 0 we can just leave the tag on one operand.
1732 ASSERT(kSmiTag == 0); // Adjust code below if not the case.
1733 // Remove smi tag from the left operand (but keep sign).
1734 // Left-hand operand has been copied into answer.
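      // Worked example: with tagged inputs 2a and 2b, untagging one
      // operand gives a * (2b) = 2 * (a * b), which is already the tagged
      // product. e.g. a = 3, b = 5: 3 * 10 = 30 = tagged(15).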
Leon Clarkee46be812010-01-19 14:06:41 +00001735 __ SmiUntag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001736 // Do multiplication of smis, leaving result in answer.
1737 __ imul(answer.reg(), Operand(right->reg()));
1738 // Go slow on overflows.
1739 deferred->Branch(overflow);
1740 // Check for negative zero result. If product is zero, and one
1741 // argument is negative, go to slow case. The frame is unchanged
1742 // in this block, so local control flow can use a Label rather
1743 // than a JumpTarget.
Steve Block6ded16b2010-05-10 14:33:55 +01001744 if (!expr->no_negative_zero()) {
1745 Label non_zero_result;
1746 __ test(answer.reg(), Operand(answer.reg()));
1747 __ j(not_zero, &non_zero_result, taken);
1748 __ mov(answer.reg(), left->reg());
1749 __ or_(answer.reg(), Operand(right->reg()));
1750 deferred->Branch(negative);
1751 __ xor_(answer.reg(), Operand(answer.reg())); // Positive 0 is correct.
1752 __ bind(&non_zero_result);
1753 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001754 break;
1755 }
1756
1757 case Token::BIT_OR:
1758 __ or_(answer.reg(), Operand(right->reg()));
1759 break;
1760
1761 case Token::BIT_AND:
1762 __ and_(answer.reg(), Operand(right->reg()));
1763 break;
1764
1765 case Token::BIT_XOR:
1766 __ xor_(answer.reg(), Operand(right->reg()));
1767 break;
1768
1769 default:
1770 UNREACHABLE();
1771 break;
1772 }
1773 deferred->BindExit();
1774 left->Unuse();
1775 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001776 ASSERT(answer.is_valid());
1777 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001778}
1779
1780
1781// Call the appropriate binary operation stub to compute src op value
1782// and leave the result in dst.
1783class DeferredInlineSmiOperation: public DeferredCode {
1784 public:
1785 DeferredInlineSmiOperation(Token::Value op,
1786 Register dst,
1787 Register src,
Steve Block6ded16b2010-05-10 14:33:55 +01001788 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001789 Smi* value,
1790 OverwriteMode overwrite_mode)
1791 : op_(op),
1792 dst_(dst),
1793 src_(src),
Steve Block6ded16b2010-05-10 14:33:55 +01001794 type_info_(type_info),
Steve Blocka7e24c12009-10-30 11:49:00 +00001795 value_(value),
1796 overwrite_mode_(overwrite_mode) {
Steve Block6ded16b2010-05-10 14:33:55 +01001797 if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
Steve Blocka7e24c12009-10-30 11:49:00 +00001798 set_comment("[ DeferredInlineSmiOperation");
1799 }
1800
1801 virtual void Generate();
1802
1803 private:
1804 Token::Value op_;
1805 Register dst_;
1806 Register src_;
Steve Block6ded16b2010-05-10 14:33:55 +01001807 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001808 Smi* value_;
1809 OverwriteMode overwrite_mode_;
1810};
1811
1812
1813void DeferredInlineSmiOperation::Generate() {
Steve Blocka7e24c12009-10-30 11:49:00 +00001814 // For mod we don't generate all the Smi code inline.
1815 GenericBinaryOpStub stub(
1816 op_,
1817 overwrite_mode_,
Steve Block6ded16b2010-05-10 14:33:55 +01001818 (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB,
1819 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001820 stub.GenerateCall(masm_, src_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001821 if (!dst_.is(eax)) __ mov(dst_, eax);
1822}
1823
1824
1825// Call the appropriate binary operation stub to compute value op src
1826// and leave the result in dst.
1827class DeferredInlineSmiOperationReversed: public DeferredCode {
1828 public:
1829 DeferredInlineSmiOperationReversed(Token::Value op,
1830 Register dst,
1831 Smi* value,
1832 Register src,
Steve Block6ded16b2010-05-10 14:33:55 +01001833 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001834 OverwriteMode overwrite_mode)
1835 : op_(op),
1836 dst_(dst),
Steve Block6ded16b2010-05-10 14:33:55 +01001837 type_info_(type_info),
Steve Blocka7e24c12009-10-30 11:49:00 +00001838 value_(value),
1839 src_(src),
1840 overwrite_mode_(overwrite_mode) {
1841 set_comment("[ DeferredInlineSmiOperationReversed");
1842 }
1843
1844 virtual void Generate();
1845
1846 private:
1847 Token::Value op_;
1848 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001849 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001850 Smi* value_;
1851 Register src_;
1852 OverwriteMode overwrite_mode_;
1853};
1854
1855
1856void DeferredInlineSmiOperationReversed::Generate() {
Steve Block6ded16b2010-05-10 14:33:55 +01001857 GenericBinaryOpStub igostub(
1858 op_,
1859 overwrite_mode_,
1860 NO_SMI_CODE_IN_STUB,
1861 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001862 igostub.GenerateCall(masm_, value_, src_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001863 if (!dst_.is(eax)) __ mov(dst_, eax);
1864}
1865
1866
1867// The result of src + value is in dst. It either overflowed or was not
1868// smi tagged. Undo the speculative addition and call the appropriate
1869// specialized stub for add. The result is left in dst.
1870class DeferredInlineSmiAdd: public DeferredCode {
1871 public:
1872 DeferredInlineSmiAdd(Register dst,
Steve Block6ded16b2010-05-10 14:33:55 +01001873 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001874 Smi* value,
1875 OverwriteMode overwrite_mode)
Steve Block6ded16b2010-05-10 14:33:55 +01001876 : dst_(dst),
1877 type_info_(type_info),
1878 value_(value),
1879 overwrite_mode_(overwrite_mode) {
1880 if (type_info_.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
Steve Blocka7e24c12009-10-30 11:49:00 +00001881 set_comment("[ DeferredInlineSmiAdd");
1882 }
1883
1884 virtual void Generate();
1885
1886 private:
1887 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001888 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001889 Smi* value_;
1890 OverwriteMode overwrite_mode_;
1891};
1892
1893
1894void DeferredInlineSmiAdd::Generate() {
1895 // Undo the optimistic add operation and call the shared stub.
1896 __ sub(Operand(dst_), Immediate(value_));
Steve Block6ded16b2010-05-10 14:33:55 +01001897 GenericBinaryOpStub igostub(
1898 Token::ADD,
1899 overwrite_mode_,
1900 NO_SMI_CODE_IN_STUB,
1901 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001902 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001903 if (!dst_.is(eax)) __ mov(dst_, eax);
1904}
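// The inline site performs the add first (add reg, Immediate(value)) and
// only then tests the overflow flag and the smi tag, so on the slow path
// the deferred code above must subtract the same tagged constant before
// the stub sees the operand; otherwise the stub would operate on the
// clobbered value.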
1905
1906
1907// The result of value + src is in dst. It either overflowed or was not
1908// smi tagged. Undo the speculative addition and call the appropriate
1909// specialized stub for add. The result is left in dst.
1910class DeferredInlineSmiAddReversed: public DeferredCode {
1911 public:
1912 DeferredInlineSmiAddReversed(Register dst,
Steve Block6ded16b2010-05-10 14:33:55 +01001913 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001914 Smi* value,
1915 OverwriteMode overwrite_mode)
Steve Block6ded16b2010-05-10 14:33:55 +01001916 : dst_(dst),
1917 type_info_(type_info),
1918 value_(value),
1919 overwrite_mode_(overwrite_mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001920 set_comment("[ DeferredInlineSmiAddReversed");
1921 }
1922
1923 virtual void Generate();
1924
1925 private:
1926 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001927 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001928 Smi* value_;
1929 OverwriteMode overwrite_mode_;
1930};
1931
1932
1933void DeferredInlineSmiAddReversed::Generate() {
1934 // Undo the optimistic add operation and call the shared stub.
1935 __ sub(Operand(dst_), Immediate(value_));
Steve Block6ded16b2010-05-10 14:33:55 +01001936 GenericBinaryOpStub igostub(
1937 Token::ADD,
1938 overwrite_mode_,
1939 NO_SMI_CODE_IN_STUB,
1940 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001941 igostub.GenerateCall(masm_, value_, dst_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001942 if (!dst_.is(eax)) __ mov(dst_, eax);
1943}
1944
1945
1946// The result of src - value is in dst. It either overflowed or was not
1947// smi tagged. Undo the speculative subtraction and call the
1948// appropriate specialized stub for subtract. The result is left in
1949// dst.
1950class DeferredInlineSmiSub: public DeferredCode {
1951 public:
1952 DeferredInlineSmiSub(Register dst,
Steve Block6ded16b2010-05-10 14:33:55 +01001953 TypeInfo type_info,
Steve Blocka7e24c12009-10-30 11:49:00 +00001954 Smi* value,
1955 OverwriteMode overwrite_mode)
Steve Block6ded16b2010-05-10 14:33:55 +01001956 : dst_(dst),
1957 type_info_(type_info),
1958 value_(value),
1959 overwrite_mode_(overwrite_mode) {
1960 if (type_info.IsSmi()) overwrite_mode_ = NO_OVERWRITE;
Steve Blocka7e24c12009-10-30 11:49:00 +00001961 set_comment("[ DeferredInlineSmiSub");
1962 }
1963
1964 virtual void Generate();
1965
1966 private:
1967 Register dst_;
Steve Block6ded16b2010-05-10 14:33:55 +01001968 TypeInfo type_info_;
Steve Blocka7e24c12009-10-30 11:49:00 +00001969 Smi* value_;
1970 OverwriteMode overwrite_mode_;
1971};
1972
1973
1974void DeferredInlineSmiSub::Generate() {
1975 // Undo the optimistic sub operation and call the shared stub.
1976 __ add(Operand(dst_), Immediate(value_));
Steve Block6ded16b2010-05-10 14:33:55 +01001977 GenericBinaryOpStub igostub(
1978 Token::SUB,
1979 overwrite_mode_,
1980 NO_SMI_CODE_IN_STUB,
1981 TypeInfo::Combine(TypeInfo::Smi(), type_info_));
Steve Block3ce2e202009-11-05 08:53:23 +00001982 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001983 if (!dst_.is(eax)) __ mov(dst_, eax);
1984}
1985
1986
Kristian Monsen25f61362010-05-21 11:50:48 +01001987Result CodeGenerator::ConstantSmiBinaryOperation(BinaryOperation* expr,
1988 Result* operand,
1989 Handle<Object> value,
1990 bool reversed,
1991 OverwriteMode overwrite_mode) {
1992 // Generate inline code for a binary operation when one of the
1993 // operands is a constant smi. Consumes the argument "operand".
Steve Blocka7e24c12009-10-30 11:49:00 +00001994 if (IsUnsafeSmi(value)) {
1995 Result unsafe_operand(value);
1996 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01001997 return LikelySmiBinaryOperation(expr, &unsafe_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00001998 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001999 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002000 return LikelySmiBinaryOperation(expr, operand, &unsafe_operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002001 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002002 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002003 }
2004
2005 // Get the literal value.
2006 Smi* smi_value = Smi::cast(*value);
2007 int int_value = smi_value->value();
2008
Steve Block6ded16b2010-05-10 14:33:55 +01002009 Token::Value op = expr->op();
Leon Clarked91b9f72010-01-27 17:25:45 +00002010 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00002011 switch (op) {
2012 case Token::ADD: {
2013 operand->ToRegister();
2014 frame_->Spill(operand->reg());
2015
2016 // Optimistically add. Call the specialized add stub if the
2017 // result is not a smi or overflows.
2018 DeferredCode* deferred = NULL;
2019 if (reversed) {
2020 deferred = new DeferredInlineSmiAddReversed(operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002021 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002022 smi_value,
2023 overwrite_mode);
2024 } else {
2025 deferred = new DeferredInlineSmiAdd(operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002026 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002027 smi_value,
2028 overwrite_mode);
2029 }
2030 __ add(Operand(operand->reg()), Immediate(value));
2031 deferred->Branch(overflow);
Steve Block6ded16b2010-05-10 14:33:55 +01002032 if (!operand->type_info().IsSmi()) {
2033 __ test(operand->reg(), Immediate(kSmiTagMask));
2034 deferred->Branch(not_zero);
2035 } else if (FLAG_debug_code) {
2036 __ AbortIfNotSmi(operand->reg());
2037 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002038 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002039 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002040 break;
2041 }
2042
2043 case Token::SUB: {
2044 DeferredCode* deferred = NULL;
Steve Blocka7e24c12009-10-30 11:49:00 +00002045 if (reversed) {
2046 // The reversed case is only hit when the right operand is not a
2047 // constant.
2048 ASSERT(operand->is_register());
2049 answer = allocator()->Allocate();
2050 ASSERT(answer.is_valid());
2051 __ Set(answer.reg(), Immediate(value));
Steve Block6ded16b2010-05-10 14:33:55 +01002052 deferred =
2053 new DeferredInlineSmiOperationReversed(op,
2054 answer.reg(),
2055 smi_value,
2056 operand->reg(),
2057 operand->type_info(),
2058 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002059 __ sub(answer.reg(), Operand(operand->reg()));
2060 } else {
2061 operand->ToRegister();
2062 frame_->Spill(operand->reg());
2063 answer = *operand;
2064 deferred = new DeferredInlineSmiSub(operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002065 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002066 smi_value,
2067 overwrite_mode);
2068 __ sub(Operand(operand->reg()), Immediate(value));
2069 }
2070 deferred->Branch(overflow);
Steve Block6ded16b2010-05-10 14:33:55 +01002071 if (!operand->type_info().IsSmi()) {
2072 __ test(answer.reg(), Immediate(kSmiTagMask));
2073 deferred->Branch(not_zero);
2074 } else if (FLAG_debug_code) {
2075 __ AbortIfNotSmi(operand->reg());
2076 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002077 deferred->BindExit();
2078 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00002079 break;
2080 }
2081
2082 case Token::SAR:
2083 if (reversed) {
2084 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01002085 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002086 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002087 } else {
2088 // Only the least significant 5 bits of the shift value are used.
2089 // In the slow case, this masking is done inside the runtime call.
2090 int shift_value = int_value & 0x1f;
2091 operand->ToRegister();
2092 frame_->Spill(operand->reg());
Steve Block6ded16b2010-05-10 14:33:55 +01002093 if (!operand->type_info().IsSmi()) {
2094 DeferredInlineSmiOperation* deferred =
2095 new DeferredInlineSmiOperation(op,
2096 operand->reg(),
2097 operand->reg(),
2098 operand->type_info(),
2099 smi_value,
2100 overwrite_mode);
2101 __ test(operand->reg(), Immediate(kSmiTagMask));
2102 deferred->Branch(not_zero);
2103 if (shift_value > 0) {
2104 __ sar(operand->reg(), shift_value);
2105 __ and_(operand->reg(), ~kSmiTagMask);
2106 }
2107 deferred->BindExit();
2108 } else {
2109 if (FLAG_debug_code) {
2110 __ AbortIfNotSmi(operand->reg());
2111 }
2112 if (shift_value > 0) {
2113 __ sar(operand->reg(), shift_value);
2114 __ and_(operand->reg(), ~kSmiTagMask);
2115 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002116 }
Leon Clarked91b9f72010-01-27 17:25:45 +00002117 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002118 }
2119 break;
2120
2121 case Token::SHR:
2122 if (reversed) {
2123 Result constant_operand(value);
Steve Block6ded16b2010-05-10 14:33:55 +01002124 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002125 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002126 } else {
2127 // Only the least significant 5 bits of the shift value are used.
2128 // In the slow case, this masking is done inside the runtime call.
2129 int shift_value = int_value & 0x1f;
2130 operand->ToRegister();
Leon Clarked91b9f72010-01-27 17:25:45 +00002131 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00002132 ASSERT(answer.is_valid());
2133 DeferredInlineSmiOperation* deferred =
2134 new DeferredInlineSmiOperation(op,
2135 answer.reg(),
2136 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002137 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002138 smi_value,
2139 overwrite_mode);
Steve Block6ded16b2010-05-10 14:33:55 +01002140 if (!operand->type_info().IsSmi()) {
2141 __ test(operand->reg(), Immediate(kSmiTagMask));
2142 deferred->Branch(not_zero);
2143 } else if (FLAG_debug_code) {
2144 __ AbortIfNotSmi(operand->reg());
2145 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002146 __ mov(answer.reg(), operand->reg());
Leon Clarkee46be812010-01-19 14:06:41 +00002147 __ SmiUntag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00002148 __ shr(answer.reg(), shift_value);
2149        // A negative smi shifted right by two or more is in the positive smi range.
2150 if (shift_value < 2) {
2151 __ test(answer.reg(), Immediate(0xc0000000));
2152 deferred->Branch(not_zero);
2153 }
2154 operand->Unuse();
Leon Clarkee46be812010-01-19 14:06:41 +00002155 __ SmiTag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00002156 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00002157 }
2158 break;
2159
2160 case Token::SHL:
2161 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002162 // Move operand into ecx and also into a second register.
2163 // If operand is already in a register, take advantage of that.
2164 // This lets us modify ecx, but still bail out to deferred code.
Leon Clarkee46be812010-01-19 14:06:41 +00002165 Result right;
2166 Result right_copy_in_ecx;
Steve Block6ded16b2010-05-10 14:33:55 +01002167 TypeInfo right_type_info = operand->type_info();
Leon Clarkee46be812010-01-19 14:06:41 +00002168 operand->ToRegister();
2169 if (operand->reg().is(ecx)) {
2170 right = allocator()->Allocate();
2171 __ mov(right.reg(), ecx);
2172 frame_->Spill(ecx);
2173 right_copy_in_ecx = *operand;
2174 } else {
2175 right_copy_in_ecx = allocator()->Allocate(ecx);
2176 __ mov(ecx, operand->reg());
2177 right = *operand;
2178 }
2179 operand->Unuse();
2180
Leon Clarked91b9f72010-01-27 17:25:45 +00002181 answer = allocator()->Allocate();
Leon Clarkee46be812010-01-19 14:06:41 +00002182 DeferredInlineSmiOperationReversed* deferred =
2183 new DeferredInlineSmiOperationReversed(op,
2184 answer.reg(),
2185 smi_value,
2186 right.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002187 right_type_info,
Leon Clarkee46be812010-01-19 14:06:41 +00002188 overwrite_mode);
2189 __ mov(answer.reg(), Immediate(int_value));
2190 __ sar(ecx, kSmiTagSize);
Steve Block6ded16b2010-05-10 14:33:55 +01002191 if (!right_type_info.IsSmi()) {
2192 deferred->Branch(carry);
2193 } else if (FLAG_debug_code) {
2194 __ AbortIfNotSmi(right.reg());
2195 }
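        // The carry check works because sar ecx, 1 moves the low (tag)
        // bit into the carry flag: a smi has tag 0 (carry clear), while a
        // heap object pointer has its low bit set (carry set). e.g.
        // tagged 5 = 0x0000000a gives ecx = 5 with CF = 0.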
Leon Clarkee46be812010-01-19 14:06:41 +00002196 __ shl_cl(answer.reg());
2197 __ cmp(answer.reg(), 0xc0000000);
2198 deferred->Branch(sign);
2199 __ SmiTag(answer.reg());
2200
2201 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00002202 } else {
2203 // Only the least significant 5 bits of the shift value are used.
2204 // In the slow case, this masking is done inside the runtime call.
2205 int shift_value = int_value & 0x1f;
2206 operand->ToRegister();
2207 if (shift_value == 0) {
2208 // Spill operand so it can be overwritten in the slow case.
2209 frame_->Spill(operand->reg());
2210 DeferredInlineSmiOperation* deferred =
2211 new DeferredInlineSmiOperation(op,
2212 operand->reg(),
2213 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002214 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002215 smi_value,
2216 overwrite_mode);
2217 __ test(operand->reg(), Immediate(kSmiTagMask));
2218 deferred->Branch(not_zero);
2219 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002220 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002221 } else {
2222 // Use a fresh temporary for nonzero shift values.
Leon Clarked91b9f72010-01-27 17:25:45 +00002223 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00002224 ASSERT(answer.is_valid());
2225 DeferredInlineSmiOperation* deferred =
2226 new DeferredInlineSmiOperation(op,
2227 answer.reg(),
2228 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002229 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002230 smi_value,
2231 overwrite_mode);
Steve Block6ded16b2010-05-10 14:33:55 +01002232 if (!operand->type_info().IsSmi()) {
2233 __ test(operand->reg(), Immediate(kSmiTagMask));
2234 deferred->Branch(not_zero);
2235 } else if (FLAG_debug_code) {
2236 __ AbortIfNotSmi(operand->reg());
2237 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002238 __ mov(answer.reg(), operand->reg());
2239 ASSERT(kSmiTag == 0); // adjust code if not the case
2240 // We do no shifts, only the Smi conversion, if shift_value is 1.
2241 if (shift_value > 1) {
2242 __ shl(answer.reg(), shift_value - 1);
2243 }
2244 // Convert int result to Smi, checking that it is in int range.
2245 ASSERT(kSmiTagSize == 1); // adjust code if not the case
2246 __ add(answer.reg(), Operand(answer.reg()));
2247 deferred->Branch(overflow);
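          // Worked example: the register still holds the tagged value
          // 2a, so shl by (shift_value - 1) produces a << shift_value
          // missing only the tag, and the add doubles it into
          // tagged(a << shift_value) while setting the overflow flag if
          // the smi range is exceeded. e.g. a = 3, shift_value = 2:
          // 6 -> 12 -> 24 = tagged(12).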
2248 deferred->BindExit();
2249 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00002250 }
2251 }
2252 break;
2253
2254 case Token::BIT_OR:
2255 case Token::BIT_XOR:
2256 case Token::BIT_AND: {
2257 operand->ToRegister();
2258 frame_->Spill(operand->reg());
2259 DeferredCode* deferred = NULL;
2260 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002261 deferred =
2262 new DeferredInlineSmiOperationReversed(op,
2263 operand->reg(),
2264 smi_value,
2265 operand->reg(),
2266 operand->type_info(),
2267 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002268 } else {
2269 deferred = new DeferredInlineSmiOperation(op,
2270 operand->reg(),
2271 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002272 operand->type_info(),
Steve Blocka7e24c12009-10-30 11:49:00 +00002273 smi_value,
2274 overwrite_mode);
2275 }
Steve Block6ded16b2010-05-10 14:33:55 +01002276 if (!operand->type_info().IsSmi()) {
2277 __ test(operand->reg(), Immediate(kSmiTagMask));
2278 deferred->Branch(not_zero);
2279 } else if (FLAG_debug_code) {
2280 __ AbortIfNotSmi(operand->reg());
2281 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002282 if (op == Token::BIT_AND) {
2283 __ and_(Operand(operand->reg()), Immediate(value));
2284 } else if (op == Token::BIT_XOR) {
2285 if (int_value != 0) {
2286 __ xor_(Operand(operand->reg()), Immediate(value));
2287 }
2288 } else {
2289 ASSERT(op == Token::BIT_OR);
2290 if (int_value != 0) {
2291 __ or_(Operand(operand->reg()), Immediate(value));
2292 }
2293 }
2294 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00002295 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002296 break;
2297 }
2298
Andrei Popescu402d9372010-02-26 13:31:12 +00002299 case Token::DIV:
2300 if (!reversed && int_value == 2) {
2301 operand->ToRegister();
2302 frame_->Spill(operand->reg());
2303
2304 DeferredInlineSmiOperation* deferred =
2305 new DeferredInlineSmiOperation(op,
2306 operand->reg(),
2307 operand->reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01002308 operand->type_info(),
Andrei Popescu402d9372010-02-26 13:31:12 +00002309 smi_value,
2310 overwrite_mode);
2311 // Check that lowest log2(value) bits of operand are zero, and test
2312 // smi tag at the same time.
2313 ASSERT_EQ(0, kSmiTag);
2314 ASSERT_EQ(1, kSmiTagSize);
2315 __ test(operand->reg(), Immediate(3));
2316 deferred->Branch(not_zero); // Branch if non-smi or odd smi.
2317 __ sar(operand->reg(), 1);
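        // Worked example: the operand holds tagged x = 2a, so the test
        // against 3 checks the smi tag (bit 0) and the parity of a
        // (bit 1) at once. For even a = 2k, sar by 1 maps 4k to 2k,
        // which is exactly tagged(a / 2). e.g. a = 6: 12 -> 6 = tagged(3).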
2318 deferred->BindExit();
2319 answer = *operand;
2320 } else {
2321 // Cannot fall through MOD to default case, so we duplicate the
2322 // default case here.
2323 Result constant_operand(value);
2324 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002325 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Andrei Popescu402d9372010-02-26 13:31:12 +00002326 overwrite_mode);
2327 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002328 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
Andrei Popescu402d9372010-02-26 13:31:12 +00002329 overwrite_mode);
2330 }
2331 }
2332 break;
Steve Block6ded16b2010-05-10 14:33:55 +01002333
Steve Blocka7e24c12009-10-30 11:49:00 +00002334 // Generate inline code for mod of powers of 2 and negative powers of 2.
2335 case Token::MOD:
2336 if (!reversed &&
2337 int_value != 0 &&
2338 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
2339 operand->ToRegister();
2340 frame_->Spill(operand->reg());
Steve Block6ded16b2010-05-10 14:33:55 +01002341 DeferredCode* deferred =
2342 new DeferredInlineSmiOperation(op,
2343 operand->reg(),
2344 operand->reg(),
2345 operand->type_info(),
2346 smi_value,
2347 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002348 // Check for negative or non-Smi left hand side.
Steve Block6ded16b2010-05-10 14:33:55 +01002349 __ test(operand->reg(), Immediate(kSmiTagMask | kSmiSignMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00002350 deferred->Branch(not_zero);
2351 if (int_value < 0) int_value = -int_value;
2352 if (int_value == 1) {
2353 __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
2354 } else {
2355 __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
2356 }
2357 deferred->BindExit();
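        // Worked example: for x = tagged(a) = 2a with a >= 0 and
        // int_value = 8, the mask is (8 << 1) - 1 = 15 and 2a & 15 ==
        // 2 * (a & 7), i.e. tagged(a % 8). Negative dividends bail out
        // above because the spec result can be negative or even -0.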
Leon Clarked91b9f72010-01-27 17:25:45 +00002358 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00002359 break;
2360 }
2361 // Fall through if we did not find a power of 2 on the right hand side!
2362
2363 default: {
2364 Result constant_operand(value);
2365 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01002366 answer = LikelySmiBinaryOperation(expr, &constant_operand, operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002367 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002368 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01002369 answer = LikelySmiBinaryOperation(expr, operand, &constant_operand,
Leon Clarked91b9f72010-01-27 17:25:45 +00002370 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00002371 }
2372 break;
2373 }
2374 }
Leon Clarked91b9f72010-01-27 17:25:45 +00002375 ASSERT(answer.is_valid());
2376 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00002377}
2378
2379
Leon Clarkee46be812010-01-19 14:06:41 +00002380static bool CouldBeNaN(const Result& result) {
Steve Block6ded16b2010-05-10 14:33:55 +01002381 if (result.type_info().IsSmi()) return false;
2382 if (result.type_info().IsInteger32()) return false;
Leon Clarkee46be812010-01-19 14:06:41 +00002383 if (!result.is_constant()) return true;
2384 if (!result.handle()->IsHeapNumber()) return false;
2385 return isnan(HeapNumber::cast(*result.handle())->value());
2386}
2387
2388
Steve Block6ded16b2010-05-10 14:33:55 +01002389// Convert from signed to unsigned comparison to match the way EFLAGS are set
2390// by FPU and XMM compare instructions.
2391static Condition DoubleCondition(Condition cc) {
2392 switch (cc) {
2393 case less: return below;
2394 case equal: return equal;
2395 case less_equal: return below_equal;
2396 case greater: return above;
2397 case greater_equal: return above_equal;
2398 default: UNREACHABLE();
2399 }
2400 UNREACHABLE();
2401 return equal;
2402}
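// The mapping matters because ucomisd (and the fucomi family) sets ZF and
// CF the way an *unsigned* integer compare would and clears SF and OF, so
// a signed condition such as 'less' (SF != OF) would always be false after
// a double compare; 'below' (CF set) is the one that works.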
2403
2404
Leon Clarkee46be812010-01-19 14:06:41 +00002405void CodeGenerator::Comparison(AstNode* node,
2406 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00002407 bool strict,
2408 ControlDestination* dest) {
2409 // Strict only makes sense for equality comparisons.
2410 ASSERT(!strict || cc == equal);
2411
2412 Result left_side;
2413 Result right_side;
2414 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
2415 if (cc == greater || cc == less_equal) {
2416 cc = ReverseCondition(cc);
2417 left_side = frame_->Pop();
2418 right_side = frame_->Pop();
2419 } else {
2420 right_side = frame_->Pop();
2421 left_side = frame_->Pop();
2422 }
2423 ASSERT(cc == less || cc == equal || cc == greater_equal);
2424
Leon Clarkee46be812010-01-19 14:06:41 +00002425 // If either side is a constant of some sort, we can probably optimize the
2426 // comparison.
2427 bool left_side_constant_smi = false;
2428 bool left_side_constant_null = false;
2429 bool left_side_constant_1_char_string = false;
2430 if (left_side.is_constant()) {
2431 left_side_constant_smi = left_side.handle()->IsSmi();
2432 left_side_constant_null = left_side.handle()->IsNull();
2433 left_side_constant_1_char_string =
2434 (left_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002435 String::cast(*left_side.handle())->length() == 1 &&
2436 String::cast(*left_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002437 }
2438 bool right_side_constant_smi = false;
2439 bool right_side_constant_null = false;
2440 bool right_side_constant_1_char_string = false;
2441 if (right_side.is_constant()) {
2442 right_side_constant_smi = right_side.handle()->IsSmi();
2443 right_side_constant_null = right_side.handle()->IsNull();
2444 right_side_constant_1_char_string =
2445 (right_side.handle()->IsString() &&
Steve Block6ded16b2010-05-10 14:33:55 +01002446 String::cast(*right_side.handle())->length() == 1 &&
2447 String::cast(*right_side.handle())->IsAsciiRepresentation());
Leon Clarkee46be812010-01-19 14:06:41 +00002448 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002449
2450 if (left_side_constant_smi || right_side_constant_smi) {
2451 if (left_side_constant_smi && right_side_constant_smi) {
2452 // Trivial case, comparing two constants.
2453 int left_value = Smi::cast(*left_side.handle())->value();
2454 int right_value = Smi::cast(*right_side.handle())->value();
2455 switch (cc) {
2456 case less:
2457 dest->Goto(left_value < right_value);
2458 break;
2459 case equal:
2460 dest->Goto(left_value == right_value);
2461 break;
2462 case greater_equal:
2463 dest->Goto(left_value >= right_value);
2464 break;
2465 default:
2466 UNREACHABLE();
2467 }
Leon Clarkee46be812010-01-19 14:06:41 +00002468 } else {
2469 // Only one side is a constant Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00002470 // If left side is a constant Smi, reverse the operands.
2471 // Since one side is a constant Smi, conversion order does not matter.
2472 if (left_side_constant_smi) {
2473 Result temp = left_side;
2474 left_side = right_side;
2475 right_side = temp;
2476 cc = ReverseCondition(cc);
Steve Block6ded16b2010-05-10 14:33:55 +01002477 // This may re-introduce greater or less_equal as the value of cc.
Steve Blocka7e24c12009-10-30 11:49:00 +00002478 // CompareStub and the inline code both support all values of cc.
2479 }
2480 // Implement comparison against a constant Smi, inlining the case
2481 // where both sides are Smis.
2482 left_side.ToRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00002483 Register left_reg = left_side.reg();
2484 Handle<Object> right_val = right_side.handle();
Steve Blocka7e24c12009-10-30 11:49:00 +00002485
2486 // Here we split control flow to the stub call and inlined cases
2487 // before finally splitting it to the control destination. We use
2488 // a jump target and branching to duplicate the virtual frame at
2489 // the first split. We manually handle the off-frame references
2490 // by reconstituting them on the non-fall-through path.
Steve Blocka7e24c12009-10-30 11:49:00 +00002491
Steve Block6ded16b2010-05-10 14:33:55 +01002492 if (left_side.is_smi()) {
Kristian Monsen25f61362010-05-21 11:50:48 +01002493 if (FLAG_debug_code) {
2494 __ AbortIfNotSmi(left_side.reg());
2495 }
Steve Block6ded16b2010-05-10 14:33:55 +01002496 } else {
2497 JumpTarget is_smi;
2498 __ test(left_side.reg(), Immediate(kSmiTagMask));
2499 is_smi.Branch(zero, taken);
2500
2501 bool is_loop_condition = (node->AsExpression() != NULL) &&
2502 node->AsExpression()->is_loop_condition();
2503 if (!is_loop_condition &&
2504 CpuFeatures::IsSupported(SSE2) &&
2505 right_val->IsSmi()) {
2506 // Right side is a constant smi and left side has been checked
2507 // not to be a smi.
2508 CpuFeatures::Scope use_sse2(SSE2);
2509 JumpTarget not_number;
2510 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
2511 Immediate(Factory::heap_number_map()));
2512 not_number.Branch(not_equal, &left_side);
2513 __ movdbl(xmm1,
2514 FieldOperand(left_reg, HeapNumber::kValueOffset));
2515 int value = Smi::cast(*right_val)->value();
2516 if (value == 0) {
2517 __ xorpd(xmm0, xmm0);
2518 } else {
2519 Result temp = allocator()->Allocate();
2520 __ mov(temp.reg(), Immediate(value));
2521 __ cvtsi2sd(xmm0, Operand(temp.reg()));
2522 temp.Unuse();
2523 }
Kristian Monsen25f61362010-05-21 11:50:48 +01002524 __ ucomisd(xmm1, xmm0);
Steve Block6ded16b2010-05-10 14:33:55 +01002525 // Jump to builtin for NaN.
2526 not_number.Branch(parity_even, &left_side);
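          // ucomisd reports an unordered result (a NaN operand) by
          // setting ZF, PF and CF; branching on parity_even therefore
          // routes NaN comparisons to the stub, where every comparison
          // with NaN except '!=' evaluates to false.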
2527 left_side.Unuse();
2528 dest->true_target()->Branch(DoubleCondition(cc));
2529 dest->false_target()->Jump();
2530 not_number.Bind(&left_side);
Leon Clarkee46be812010-01-19 14:06:41 +00002531 }
Steve Block6ded16b2010-05-10 14:33:55 +01002532
2533 // Setup and call the compare stub.
2534 CompareStub stub(cc, strict, kCantBothBeNaN);
2535 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2536 result.ToRegister();
2537 __ cmp(result.reg(), 0);
2538 result.Unuse();
2539 dest->true_target()->Branch(cc);
Leon Clarkee46be812010-01-19 14:06:41 +00002540 dest->false_target()->Jump();
Steve Block6ded16b2010-05-10 14:33:55 +01002541
2542 is_smi.Bind();
Leon Clarkee46be812010-01-19 14:06:41 +00002543 }
2544
Steve Blocka7e24c12009-10-30 11:49:00 +00002545 left_side = Result(left_reg);
2546 right_side = Result(right_val);
2547 // Test smi equality and comparison by signed int comparison.
2548 if (IsUnsafeSmi(right_side.handle())) {
2549 right_side.ToRegister();
2550 __ cmp(left_side.reg(), Operand(right_side.reg()));
2551 } else {
2552 __ cmp(Operand(left_side.reg()), Immediate(right_side.handle()));
2553 }
2554 left_side.Unuse();
2555 right_side.Unuse();
2556 dest->Split(cc);
2557 }
Leon Clarkee46be812010-01-19 14:06:41 +00002558
Steve Blocka7e24c12009-10-30 11:49:00 +00002559 } else if (cc == equal &&
2560 (left_side_constant_null || right_side_constant_null)) {
2561 // To make null checks efficient, we check if either the left side or
2562 // the right side is the constant 'null'.
2563 // If so, we optimize the code by inlining a null check instead of
2564 // calling the (very) general runtime routine for checking equality.
2565 Result operand = left_side_constant_null ? right_side : left_side;
2566 right_side.Unuse();
2567 left_side.Unuse();
2568 operand.ToRegister();
2569 __ cmp(operand.reg(), Factory::null_value());
2570 if (strict) {
2571 operand.Unuse();
2572 dest->Split(equal);
2573 } else {
2574 // The 'null' value is only equal to 'undefined' if using non-strict
2575 // comparisons.
2576 dest->true_target()->Branch(equal);
2577 __ cmp(operand.reg(), Factory::undefined_value());
2578 dest->true_target()->Branch(equal);
2579 __ test(operand.reg(), Immediate(kSmiTagMask));
2580 dest->false_target()->Branch(equal);
2581
2582 // It can be an undetectable object.
2583 // Use a scratch register in preference to spilling operand.reg().
2584 Result temp = allocator()->Allocate();
2585 ASSERT(temp.is_valid());
2586 __ mov(temp.reg(),
2587 FieldOperand(operand.reg(), HeapObject::kMapOffset));
2588 __ movzx_b(temp.reg(),
2589 FieldOperand(temp.reg(), Map::kBitFieldOffset));
2590 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
2591 temp.Unuse();
2592 operand.Unuse();
2593 dest->Split(not_zero);
2594 }
Leon Clarkee46be812010-01-19 14:06:41 +00002595 } else if (left_side_constant_1_char_string ||
2596 right_side_constant_1_char_string) {
2597 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
2598 // Trivial case, comparing two constants.
2599 int left_value = String::cast(*left_side.handle())->Get(0);
2600 int right_value = String::cast(*right_side.handle())->Get(0);
2601 switch (cc) {
2602 case less:
2603 dest->Goto(left_value < right_value);
2604 break;
2605 case equal:
2606 dest->Goto(left_value == right_value);
2607 break;
2608 case greater_equal:
2609 dest->Goto(left_value >= right_value);
2610 break;
2611 default:
2612 UNREACHABLE();
2613 }
2614 } else {
2615 // Only one side is a constant 1 character string.
2616 // If left side is a constant 1-character string, reverse the operands.
2617 // Since one side is a constant string, conversion order does not matter.
2618 if (left_side_constant_1_char_string) {
2619 Result temp = left_side;
2620 left_side = right_side;
2621 right_side = temp;
2622 cc = ReverseCondition(cc);
2623 // This may reintroduce greater or less_equal as the value of cc.
2624 // CompareStub and the inline code both support all values of cc.
2625 }
2626 // Implement comparison against a constant string, inlining the case
2627 // where both sides are strings.
2628 left_side.ToRegister();
2629
2630 // Here we split control flow to the stub call and inlined cases
2631 // before finally splitting it to the control destination. We use
2632 // a jump target and branching to duplicate the virtual frame at
2633 // the first split. We manually handle the off-frame references
2634 // by reconstituting them on the non-fall-through path.
2635 JumpTarget is_not_string, is_string;
2636 Register left_reg = left_side.reg();
2637 Handle<Object> right_val = right_side.handle();
Steve Block6ded16b2010-05-10 14:33:55 +01002638 ASSERT(StringShape(String::cast(*right_val)).IsSymbol());
Leon Clarkee46be812010-01-19 14:06:41 +00002639 __ test(left_side.reg(), Immediate(kSmiTagMask));
2640 is_not_string.Branch(zero, &left_side);
2641 Result temp = allocator_->Allocate();
2642 ASSERT(temp.is_valid());
2643 __ mov(temp.reg(),
2644 FieldOperand(left_side.reg(), HeapObject::kMapOffset));
2645 __ movzx_b(temp.reg(),
2646 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
2647 // If we are testing for equality then make use of the symbol shortcut.
2648 // Check if the right left hand side has the same type as the left hand
2649 // side (which is always a symbol).
2650 if (cc == equal) {
2651 Label not_a_symbol;
2652 ASSERT(kSymbolTag != 0);
2653 // Ensure that no non-strings have the symbol bit set.
2654 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
2655 __ test(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
2656 __ j(zero, &not_a_symbol);
2657 // They are symbols, so do identity compare.
2658 __ cmp(left_side.reg(), right_side.handle());
2659 dest->true_target()->Branch(equal);
2660 dest->false_target()->Branch(not_equal);
2661 __ bind(&not_a_symbol);
2662 }
Steve Block6ded16b2010-05-10 14:33:55 +01002663 // Call the compare stub if the left side is not a flat ascii string.
Leon Clarkee46be812010-01-19 14:06:41 +00002664 __ and_(temp.reg(),
2665 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2666 __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
2667 temp.Unuse();
2668 is_string.Branch(equal, &left_side);
2669
2670      // Set up and call the compare stub.
2671 is_not_string.Bind(&left_side);
2672 CompareStub stub(cc, strict, kCantBothBeNaN);
2673 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2674 result.ToRegister();
2675 __ cmp(result.reg(), 0);
2676 result.Unuse();
2677 dest->true_target()->Branch(cc);
2678 dest->false_target()->Jump();
2679
      is_string.Bind(&left_side);
      // left_side is a sequential ASCII string.
      left_side = Result(left_reg);
      right_side = Result(right_val);
      Result temp2 = allocator_->Allocate();
      ASSERT(temp2.is_valid());
      // Handle string equality and relational comparison.
      if (cc == equal) {
        Label comparison_done;
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
        __ j(not_equal, &comparison_done);
        uint8_t char_value =
            static_cast<uint8_t>(String::cast(*right_val)->Get(0));
        __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
                char_value);
        __ bind(&comparison_done);
      } else {
        __ mov(temp2.reg(),
               FieldOperand(left_side.reg(), String::kLengthOffset));
        __ SmiUntag(temp2.reg());
        __ sub(Operand(temp2.reg()), Immediate(1));
        Label comparison;
        // If the length is 0, then the subtraction gave -1, which compares
        // less than any character.
        __ j(negative, &comparison);
        // Otherwise load the first character.
        __ movzx_b(temp2.reg(),
                   FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize));
        __ bind(&comparison);
        // Compare the first character of the string with the
        // constant 1-character string.
        uint8_t char_value =
            static_cast<uint8_t>(String::cast(*right_val)->Get(0));
        __ cmp(Operand(temp2.reg()), Immediate(char_value));
        Label characters_were_different;
        __ j(not_equal, &characters_were_different);
        // If the first character is the same then the long string sorts after
        // the short one.
        __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
               Immediate(Smi::FromInt(1)));
        __ bind(&characters_were_different);
      }
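        // Worked example of the two rules above (illustrative only):
        // comparing "apple" with the constant "b" is decided by 'a' < 'b';
        // comparing "bat" with "b" passes the character test, so the length
        // compare (3 vs. 1) makes "bat" sort after "b"; comparing "" with
        // "b" takes the negative branch, since -1 is below every character
        // value.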
      }
      temp2.Unuse();
      left_side.Unuse();
      right_side.Unuse();
      dest->Split(cc);
    }
  } else {
    // Neither side is a constant Smi, constant 1-char string or constant
    // null.  If either side is a non-smi constant, or known to be a heap
    // number, skip the smi check.
    bool known_non_smi =
        (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
        (right_side.is_constant() && !right_side.handle()->IsSmi()) ||
        left_side.type_info().IsDouble() ||
        right_side.type_info().IsDouble();
    NaNInformation nan_info =
        (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
        kBothCouldBeNaN :
        kCantBothBeNaN;

    // Inline number comparison handling any combination of smis and heap
    // numbers if:
    //   - the code is in a loop,
    //   - the compare operation is different from equal, and
    //   - the compare is not a for-loop condition.
    // The reason for excluding equal is that it will most likely be done
    // with smis (not heap numbers) and the code for comparing smis is
    // inlined separately.  The same reason applies to for-loop conditions,
    // which will also most likely be smi comparisons.
    bool is_loop_condition = (node->AsExpression() != NULL)
        && node->AsExpression()->is_loop_condition();
    bool inline_number_compare =
        loop_nesting() > 0 && cc != equal && !is_loop_condition;
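    // Illustrative consequence of the heuristic (not from the original
    // comments): for "while (i < n)" the condition is itself a loop
    // condition, so no inline double comparison is emitted for it, whereas
    // an "if (x < y)" nested inside a loop body does get the inline compare.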

    // Left and right needed in registers for the following code.
    left_side.ToRegister();
    right_side.ToRegister();

    if (known_non_smi) {
      // Inline the equality check if both operands can't be a NaN. If both
      // objects are the same they are equal.
      if (nan_info == kCantBothBeNaN && cc == equal) {
        __ cmp(left_side.reg(), Operand(right_side.reg()));
        dest->true_target()->Branch(equal);
      }

      // Inline number comparison.
      if (inline_number_compare) {
        GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
      }

      // End of in-line compare, call out to the compare stub. Don't include
      // number comparison in the stub if it was inlined.
      CompareStub stub(cc, strict, nan_info, !inline_number_compare);
      Result answer = frame_->CallStub(&stub, &left_side, &right_side);
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      dest->Split(cc);
    } else {
      // Here we split control flow to the stub call and inlined cases
      // before finally splitting it to the control destination.  We use
      // a jump target and branching to duplicate the virtual frame at
      // the first split.  We manually handle the off-frame references
      // by reconstituting them on the non-fall-through path.
      JumpTarget is_smi;
      Register left_reg = left_side.reg();
      Register right_reg = right_side.reg();

      // In-line check for comparing two smis.
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), left_side.reg());
      __ or_(temp.reg(), Operand(right_side.reg()));
      __ test(temp.reg(), Immediate(kSmiTagMask));
      temp.Unuse();
      is_smi.Branch(zero, taken);
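      // Why OR-ing works (illustrative, assuming the usual ia32 tagging
      // with kSmiTag == 0 and kSmiTagMask == 1): a smi has low bit 0 and a
      // heap pointer has low bit 1, so left | right has low bit 0 exactly
      // when both operands are smis.  E.g. smi 4 (0x8) | smi 6 (0xc) = 0xc
      // -> smi path, while smi 4 (0x8) | a tagged pointer -> low bit 1 ->
      // non-smi path.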

      // Inline the equality check if both operands can't be a NaN. If both
      // objects are the same they are equal.
      if (nan_info == kCantBothBeNaN && cc == equal) {
        __ cmp(left_side.reg(), Operand(right_side.reg()));
        dest->true_target()->Branch(equal);
      }

      // Inline number comparison.
      if (inline_number_compare) {
        GenerateInlineNumberComparison(&left_side, &right_side, cc, dest);
      }

      // End of in-line compare, call out to the compare stub. Don't include
      // number comparison in the stub if it was inlined.
      CompareStub stub(cc, strict, nan_info, !inline_number_compare);
      Result answer = frame_->CallStub(&stub, &left_side, &right_side);
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      dest->true_target()->Branch(cc);
      dest->false_target()->Jump();

      is_smi.Bind();
      left_side = Result(left_reg);
      right_side = Result(right_reg);
      __ cmp(left_side.reg(), Operand(right_side.reg()));
      right_side.Unuse();
      left_side.Unuse();
      dest->Split(cc);
    }
  }
}


// Check that the comparison operand is a number.  Jump to the not_numbers
// jump target, passing the left and right result, if the operand is not a
// number.
static void CheckComparisonOperand(MacroAssembler* masm_,
                                   Result* operand,
                                   Result* left_side,
                                   Result* right_side,
                                   JumpTarget* not_numbers) {
  // Perform check if operand is not known to be a number.
  if (!operand->type_info().IsNumber()) {
    Label done;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &done);
    __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
           Immediate(Factory::heap_number_map()));
    not_numbers->Branch(not_equal, left_side, right_side, not_taken);
    __ bind(&done);
  }
}


// Load a comparison operand onto the FPU stack.  This assumes that the
// operand has already been checked and is a number.
static void LoadComparisonOperand(MacroAssembler* masm_,
                                  Result* operand) {
  Label done;
  if (operand->type_info().IsDouble()) {
    // Operand is known to be a heap number, just load it.
    __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
  } else if (operand->type_info().IsSmi()) {
    // Operand is known to be a smi.  Convert it to double and keep the
    // original smi.
    __ SmiUntag(operand->reg());
    __ push(operand->reg());
    __ fild_s(Operand(esp, 0));
    __ pop(operand->reg());
    __ SmiTag(operand->reg());
  } else {
    // Operand type not known: check for smi, otherwise assume heap number.
    Label smi;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &smi);
    __ fld_d(FieldOperand(operand->reg(), HeapNumber::kValueOffset));
    __ jmp(&done);
    __ bind(&smi);
    __ SmiUntag(operand->reg());
    __ push(operand->reg());
    __ fild_s(Operand(esp, 0));
    __ pop(operand->reg());
    __ SmiTag(operand->reg());
    __ jmp(&done);
  }
  __ bind(&done);
}
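// Note on the smi path above (explanatory, not from the original source):
// fild_s loads a 32-bit integer from memory, so the untagged smi is pushed
// onto the stack purely to give the FPU a memory operand; the
// push/fild_s/pop/SmiTag sequence leaves the register exactly as it was
// and the operand's value on st(0) as a double.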


// Load a comparison operand into an XMM register.  Jump to the not_numbers
// jump target, passing the left and right result, if the operand is not a
// number.
static void LoadComparisonOperandSSE2(MacroAssembler* masm_,
                                      Result* operand,
                                      XMMRegister reg,
                                      Result* left_side,
                                      Result* right_side,
                                      JumpTarget* not_numbers) {
  Label done;
  if (operand->type_info().IsDouble()) {
    // Operand is known to be a heap number, just load it.
    __ movdbl(reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
  } else if (operand->type_info().IsSmi()) {
    // Operand is known to be a smi.  Convert it to double and keep the
    // original smi.
    __ SmiUntag(operand->reg());
    __ cvtsi2sd(reg, Operand(operand->reg()));
    __ SmiTag(operand->reg());
  } else {
    // Operand type not known: check for smi or heap number.
    Label smi;
    __ test(operand->reg(), Immediate(kSmiTagMask));
    __ j(zero, &smi);
    if (!operand->type_info().IsNumber()) {
      __ cmp(FieldOperand(operand->reg(), HeapObject::kMapOffset),
             Immediate(Factory::heap_number_map()));
      not_numbers->Branch(not_equal, left_side, right_side, taken);
    }
    __ movdbl(reg, FieldOperand(operand->reg(), HeapNumber::kValueOffset));
    __ jmp(&done);

    __ bind(&smi);
    // Convert the smi to a double and keep the original smi.
    __ SmiUntag(operand->reg());
    __ cvtsi2sd(reg, Operand(operand->reg()));
    __ SmiTag(operand->reg());
    __ jmp(&done);
  }
  __ bind(&done);
}


void CodeGenerator::GenerateInlineNumberComparison(Result* left_side,
                                                   Result* right_side,
                                                   Condition cc,
                                                   ControlDestination* dest) {
  ASSERT(left_side->is_register());
  ASSERT(right_side->is_register());

  JumpTarget not_numbers;
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope use_sse2(SSE2);

    // Load left and right operand into registers xmm0 and xmm1 and compare.
    LoadComparisonOperandSSE2(masm_, left_side, xmm0, left_side, right_side,
                              &not_numbers);
    LoadComparisonOperandSSE2(masm_, right_side, xmm1, left_side, right_side,
                              &not_numbers);
    __ comisd(xmm0, xmm1);
  } else {
    Label check_right, compare;

    // Make sure that both comparison operands are numbers.
    CheckComparisonOperand(masm_, left_side, left_side, right_side,
                           &not_numbers);
    CheckComparisonOperand(masm_, right_side, left_side, right_side,
                           &not_numbers);

    // Load right and left operand to FPU stack and compare.
    LoadComparisonOperand(masm_, right_side);
    LoadComparisonOperand(masm_, left_side);
    __ FCmp();
  }

  // Bail out if a NaN is involved.
  not_numbers.Branch(parity_even, left_side, right_side, not_taken);
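  // (Explanatory note: both comisd and the FCmp macro set the parity flag
  // only for an unordered result, i.e. when at least one operand is NaN,
  // so the parity_even branch above is precisely the NaN bail-out.)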

  // Split to destination targets based on comparison.
  left_side->Unuse();
  right_side->Unuse();
  dest->true_target()->Branch(DoubleCondition(cc));
  dest->false_target()->Jump();

  not_numbers.Bind(left_side, right_side);
}


// Call the function just below TOS on the stack with the given
// arguments. The receiver is the TOS.
void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                      CallFunctionFlags flags,
                                      int position) {
  // Push the arguments ("left-to-right") on the stack.
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
    frame_->SpillTop();
  }

  // Record the position for debugging purposes.
  CodeForSourcePosition(position);

  // Use the shared code stub to call the function.
  InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
  CallFunctionStub call_function(arg_count, in_loop, flags);
  Result answer = frame_->CallStub(&call_function, arg_count + 1);
  // Restore context and replace function on the stack with the
  // result of the stub invocation.
  frame_->RestoreContextRegister();
  frame_->SetElementAt(0, &answer);
}


void CodeGenerator::CallApplyLazy(Expression* applicand,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).
  // If the arguments object of the scope has not been allocated,
  // and x.apply is Function.prototype.apply, this optimization
  // just copies y and the arguments of the current function on the
  // stack, as receiver and arguments, and calls x.
  // In the implementation comments, we call x the applicand
  // and y the receiver.
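  // Illustrative JavaScript shape this targets (a sketch, not from the
  // original comments):
  //   function forward() { return target.apply(receiver, arguments); }
  // As long as 'arguments' has not materialized, the caller's actual
  // parameters can be pushed directly instead of first allocating an
  // arguments object.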
  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  // Load applicand.apply onto the stack. This will usually
  // give us a megamorphic load site. Not super, but it works.
  Load(applicand);
  frame()->Dup();
  Handle<String> name = Factory::LookupAsciiSymbol("apply");
  frame()->Push(name);
  Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
  __ nop();
  frame()->Push(&answer);

  // Load the receiver and the existing arguments object onto the
  // expression stack. Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);
  // Contents of frame at this point:
  // Frame[0]: arguments object of the current function or the hole.
  // Frame[1]: receiver
  // Frame[2]: applicand.apply
  // Frame[3]: applicand.

  // Check if the arguments object has been lazily allocated
  // already. If so, just use that instead of copying the arguments
  // from the stack. This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  frame_->Dup();
  Result probe = frame_->Pop();
  { VirtualFrame::SpilledScope spilled_scope;
    Label slow, done;
    bool try_lazy = true;
    if (probe.is_constant()) {
      try_lazy = probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      __ j(not_equal, &slow);
    }

    if (try_lazy) {
      Label build_args;
      // Get rid of the arguments object probe.
      frame_->Drop();  // Can be called on a spilled frame.
      // Stack now has 3 elements on it.
      // Contents of stack at this point:
      // esp[0]: receiver
      // esp[1]: applicand.apply
      // esp[2]: applicand.

      // Check that the receiver really is a JavaScript object.
      __ mov(eax, Operand(esp, 0));
      __ test(eax, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      // We allow all JSObjects including JSFunctions.  As long as
      // JS_FUNCTION_TYPE is the last instance type and it is right
      // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
      // bound.
      ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
      ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
      __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
      __ j(below, &build_args);

      // Check that applicand.apply is Function.prototype.apply.
      __ mov(eax, Operand(esp, kPointerSize));
      __ test(eax, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
      __ j(not_equal, &build_args);
      __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
      Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
      __ cmp(FieldOperand(ecx, SharedFunctionInfo::kCodeOffset),
             Immediate(apply_code));
      __ j(not_equal, &build_args);

      // Check that applicand is a function.
      __ mov(edi, Operand(esp, 2 * kPointerSize));
      __ test(edi, Immediate(kSmiTagMask));
      __ j(zero, &build_args);
      __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
      __ j(not_equal, &build_args);

      // Copy the arguments to this function possibly from the
      // adaptor frame below it.
      Label invoke, adapted;
      __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
      __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
      __ cmp(Operand(ecx),
             Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
      __ j(equal, &adapted);

      // No arguments adaptor frame. Copy fixed number of arguments.
      __ mov(eax, Immediate(scope()->num_parameters()));
      for (int i = 0; i < scope()->num_parameters(); i++) {
        __ push(frame_->ParameterAt(i));
      }
      __ jmp(&invoke);

      // Arguments adaptor frame present. Copy arguments from there, but
      // guard against copying too many arguments and overflowing the stack.
      __ bind(&adapted);
      static const uint32_t kArgumentsLimit = 1 * KB;
      __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
      __ SmiUntag(eax);
      __ mov(ecx, Operand(eax));
      __ cmp(eax, kArgumentsLimit);
      __ j(above, &build_args);

      // Loop through the arguments pushing them onto the execution
      // stack. We don't inform the virtual frame of the push, so we don't
      // have to worry about getting rid of the elements from the virtual
      // frame.
      Label loop;
      // ecx is a small non-negative integer, due to the test above.
      __ test(ecx, Operand(ecx));
      __ j(zero, &invoke);
      __ bind(&loop);
      __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
      __ dec(ecx);
      __ j(not_zero, &loop);

      // Invoke the function.
      __ bind(&invoke);
      ParameterCount actual(eax);
      __ InvokeFunction(edi, actual, CALL_FUNCTION);
      // Drop applicand.apply and applicand from the stack, and push
      // the result of the function call, but leave the spilled frame
      // unchanged, with 3 elements, so it is correct when we compile the
      // slow-case code.
      __ add(Operand(esp), Immediate(2 * kPointerSize));
      __ push(eax);
      // Stack now has 1 element:
      // esp[0]: result
      __ jmp(&done);

      // Slow-case: Allocate the arguments object since we know it isn't
      // there, and fall-through to the slow-case where we call
      // applicand.apply.
      __ bind(&build_args);
      // Stack now has 3 elements, because we have jumped here from a point
      // where:
      // esp[0]: receiver
      // esp[1]: applicand.apply
      // esp[2]: applicand.

      // StoreArgumentsObject requires a correct frame, and may modify it.
      Result arguments_object = StoreArgumentsObject(false);
      frame_->SpillAll();
      arguments_object.ToRegister();
      frame_->EmitPush(arguments_object.reg());
      arguments_object.Unuse();
      // Stack and frame now have 4 elements.
      __ bind(&slow);
    }

    // Generic computation of x.apply(y, args) with no special optimization.
    // Flip applicand.apply and applicand on the stack, so
    // applicand looks like the receiver of the applicand.apply call.
    // Then process it as a normal function call.
    __ mov(eax, Operand(esp, 3 * kPointerSize));
    __ mov(ebx, Operand(esp, 2 * kPointerSize));
    __ mov(Operand(esp, 2 * kPointerSize), eax);
    __ mov(Operand(esp, 3 * kPointerSize), ebx);

    CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
    Result res = frame_->CallStub(&call_function, 3);
    // The function and its two arguments have been dropped.
    frame_->Drop(1);  // Drop the receiver as well.
    res.ToRegister();
    frame_->EmitPush(res.reg());
    // Stack now has 1 element:
    // esp[0]: result
    if (try_lazy) __ bind(&done);
  }  // End of spilled scope.
  // Restore the context register after a call.
  frame_->RestoreContextRegister();
}


class DeferredStackCheck: public DeferredCode {
 public:
  DeferredStackCheck() {
    set_comment("[ DeferredStackCheck");
  }

  virtual void Generate();
};


void DeferredStackCheck::Generate() {
  StackCheckStub stub;
  __ CallStub(&stub);
}


void CodeGenerator::CheckStack() {
  DeferredStackCheck* deferred = new DeferredStackCheck;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit();
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  deferred->Branch(below);
  deferred->BindExit();
}
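// (Explanatory sketch: only the cmp/branch above is emitted on the main
// code path; the StackCheckStub call lives in out-of-line deferred code.
// The fast path therefore behaves roughly like
//   if (esp < stack_limit) StackCheckStub();
// with the call moved out of the instruction stream.)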


void CodeGenerator::VisitAndSpill(Statement* statement) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Visit(statement);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  VisitStatements(statements);
  if (frame_ != NULL) {
    frame_->SpillAll();
  }
  set_in_spilled_code(true);
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
  ASSERT(!in_spilled_code());
  for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
    Visit(statements->at(i));
  }
}


void CodeGenerator::VisitBlock(Block* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  VisitStatements(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.  The inevitable call
  // will sync frame elements to memory anyway, so we do it eagerly to
  // allow us to push the arguments directly into place.
  frame_->SyncRange(0, frame_->element_count() - 1);

  frame_->EmitPush(esi);  // The context is the first argument.
  frame_->EmitPush(Immediate(pairs));
  frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
  Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->slot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
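    // (Illustrative case, not from the original comments: a variable
    // declared inside a 'with' block, or in a function containing eval,
    // cannot be resolved to a fixed stack or context slot, so it reaches
    // this point with a LOOKUP slot and is declared via the runtime.)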
    // For now, just do a runtime call.  Sync the virtual frame eagerly
    // so we can simply push the arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);
    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(var->name()));
    // Declaration nodes are always introduced in one of two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Immediate(Smi::FromInt(attr)));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPush(Immediate(Factory::the_hole_value()));
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      frame_->EmitPush(Immediate(Smi::FromInt(0)));  // no initial value!
    }
    Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(Factory::the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    {
      // Set the initial value.
      Reference target(this, node->proxy());
      Load(val);
      target.SetValue(NOT_CONST_INIT);
      // The reference is removed from the stack (preserving TOS) when
      // it goes out of scope.
    }
    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  // Remove the lingering expression result from the top of stack.
  frame_->Drop();
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);
  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    JumpTarget then;
    JumpTarget else_;
    ControlDestination dest(&then, &else_, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The else target was bound, so we compile the else part first.
      Visit(node->else_statement());

      // We may have dangling jumps to the then part.
      if (then.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then target was bound, so we compile the then part first.
      Visit(node->then_statement());

      if (else_.is_linked()) {
        if (has_valid_frame()) exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    }

  } else if (has_then_stm) {
    ASSERT(!has_else_stm);
    JumpTarget then;
    ControlDestination dest(&then, &exit, true);
    LoadCondition(node->condition(), &dest, true);

    if (dest.false_was_fall_through()) {
      // The exit label was bound.  We may have dangling jumps to the
      // then part.
      if (then.is_linked()) {
        exit.Unuse();
        exit.Jump();
        then.Bind();
        Visit(node->then_statement());
      }
    } else {
      // The then label was bound.
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    ASSERT(!has_then_stm);
    JumpTarget else_;
    ControlDestination dest(&exit, &else_, false);
    LoadCondition(node->condition(), &dest, true);

    if (dest.true_was_fall_through()) {
      // The exit label was bound.  We may have dangling jumps to the
      // else part.
      if (else_.is_linked()) {
        exit.Unuse();
        exit.Jump();
        else_.Bind();
        Visit(node->else_statement());
      }
    } else {
      // The else label was bound.
      Visit(node->else_statement());
    }

  } else {
    ASSERT(!has_then_stm && !has_else_stm);
    // We only care about the condition's side effects (not its value
    // or control flow effect).  LoadCondition is called without
    // forcing control flow.
    ControlDestination dest(&exit, &exit, true);
    LoadCondition(node->condition(), &dest, false);
    if (!dest.is_used()) {
      // We got a value on the frame rather than (or in addition to)
      // control flow.
      frame_->Drop();
    }
  }

  if (exit.is_linked()) {
    exit.Bind();
  }
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ReturnStatement");

  CodeForStatementPosition(node);
  Load(node->expression());
  Result return_value = frame_->Pop();
  masm()->WriteRecordedPositions();
  if (function_return_is_shadowed_) {
    function_return_.Jump(&return_value);
  } else {
    frame_->PrepareForReturn();
    if (function_return_.is_bound()) {
      // If the function return label is already bound we reuse the
      // code by jumping to the return site.
      function_return_.Jump(&return_value);
    } else {
      function_return_.Bind(&return_value);
      GenerateReturnSequence(&return_value);
    }
  }
}


void CodeGenerator::GenerateReturnSequence(Result* return_value) {
  // The return value is a live (but not currently reference counted)
  // reference to eax.  This is safe because the current frame does not
  // contain a reference to eax (it is prepared for the return by spilling
  // all registers).
  if (FLAG_trace) {
    frame_->Push(return_value);
    *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
  }
  return_value->ToRegister(eax);

  // Add a label for checking the size of the code used for returning.
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);

  // Leave the frame and return popping the arguments and the
  // receiver.
  frame_->Exit();
  masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
  DeleteFrame();

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check that the size of the code used for returning matches what is
  // expected by the debugger.
  ASSERT_EQ(Assembler::kJSReturnSequenceLength,
            masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  Result context;
  if (node->is_catch_block()) {
    context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    context = frame_->CallRuntime(Runtime::kPushContext, 1);
  }

  // Update context local.
  frame_->SaveContextRegister();

  // Verify that the runtime call result and esi agree.
  if (FLAG_debug_code) {
    __ cmp(context.reg(), Operand(esi));
    __ Assert(equal, "Runtime::NewContext should end up in esi");
  }
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
  // Update context local.
  frame_->SaveContextRegister();
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);

  // Compile the switch value.
  Load(node->tag());

  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;

  JumpTarget next_test;
  // Compile the case label expressions and comparisons.  Exit early
  // if a comparison is unconditionally true.  The target next_test is
  // bound before the loop in order to indicate control flow to the
  // first comparison.
  next_test.Bind();
  for (int i = 0; i < length && !next_test.is_unused(); i++) {
    CaseClause* clause = cases->at(i);
    // The default is not a test, but remember it for later.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    // We recycle the same target next_test for each test.  Bind it if
    // the previous test has not done so and then unuse it for the
    // loop.
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    next_test.Unuse();

    // Duplicate the switch value.
    frame_->Dup();

    // Compile the label expression.
    Load(clause->label());

    // Compare and branch to the body if true or the next test if
    // false.  Prefer the next test as a fall through.
    ControlDestination dest(clause->body_target(), &next_test, false);
    Comparison(node, equal, true, &dest);

    // If the comparison fell through to the true target, jump to the
    // actual body.
    if (dest.true_was_fall_through()) {
      clause->body_target()->Unuse();
      clause->body_target()->Jump();
    }
  }
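  // (Illustrative sketch of the code shape the loop above produces for
  // clauses with labels l1..ln:
  //   dup tag; compare with l1; if equal jump body1
  //   dup tag; compare with l2; if equal jump body2
  //   ...
  // Each comparison consumes the duplicate, so the original switch value
  // stays on the frame until a test succeeds or, as handled below, all
  // tests have failed and it is dropped before jumping to the default or
  // break target.)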

  // If there was control flow to a next test from the last one
  // compiled, compile a jump to the default or break target.
  if (!next_test.is_unused()) {
    if (next_test.is_linked()) {
      next_test.Bind();
    }
    // Drop the switch value.
    frame_->Drop();
    if (default_clause != NULL) {
      default_clause->body_target()->Jump();
    } else {
      node->break_target()->Jump();
    }
  }


  // The last instruction emitted was a jump, either to the default
  // clause or the break target, or else to a case body from the loop
  // that compiles the tests.
  ASSERT(!has_valid_frame());
  // Compile case bodies as needed.
  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);

    // There are two ways to reach the body: from the corresponding
    // test or as the fall through of the previous body.
    if (clause->body_target()->is_linked() || has_valid_frame()) {
      if (clause->body_target()->is_linked()) {
        if (has_valid_frame()) {
          // If we have both a jump to the test and a fall through, put
          // a jump on the fall through path to avoid the dropping of
          // the switch value on the test path.  The exception is the
          // default which has already had the switch value dropped.
          if (clause->is_default()) {
            clause->body_target()->Bind();
          } else {
            JumpTarget body;
            body.Jump();
            clause->body_target()->Bind();
            frame_->Drop();
            body.Bind();
          }
        } else {
          // No fall through to worry about.
          clause->body_target()->Bind();
          if (!clause->is_default()) {
            frame_->Drop();
          }
        }
      } else {
        // Otherwise, we have only fall through.
        ASSERT(has_valid_frame());
      }

      // We are now prepared to compile the body.
      Comment cmnt(masm_, "[ Case body");
      VisitStatements(clause->statements());
    }
    clause->body_target()->Unuse();
  }

  // We may not have a valid frame here so bind the break target only
  // if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  ConditionAnalysis info = AnalyzeCondition(node->cond());
  // Label the top of the loop for the backward jump if necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // Use the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      // No need to label it.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      break;
    case DONT_KNOW:
      // Continue is the test, so use the backward body target.
      node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      body.Bind();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control flow can fall off the end of the body, jump back to
      // the top and bind the break target at the exit.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case ALWAYS_FALSE:
      // We may have had continues or breaks in the body.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        Comment cmnt(masm_, "[ DoWhileCondition");
        CodeForDoWhileConditionPosition(node);
        ControlDestination dest(&body, node->break_target(), false);
        LoadCondition(node->cond(), &dest, true);
      }
      if (node->break_target()->is_linked()) {
        node->break_target()->Bind();
      }
      break;
  }

  DecrementLoopNesting();
}
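// (Illustrative shape of the generated do-while in the DONT_KNOW case:
//    body:     <stack check>
//              <body statements>
//    continue: <condition>; branch back to body if true
//    break:    ...
//  i.e. the condition is evaluated once per iteration, after the body.)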


void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ WhileStatement");
  CodeForStatementPosition(node);

  // If the condition is always false and has no side effects, we do not
  // need to compile anything.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions.  This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression.  Label the top of the
      // loop with the continue target.
      node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
      node->continue_target()->Bind();
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is the test at the bottom, no need to label the test
        // at the top.  The body is a backward target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else {
        // Label the test at the top as the continue target.  The body
        // is a forward-only target.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // The loop body has been labeled with the continue target.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        // If we have chosen to recompile the test at the bottom, then
        // it is the continue target.
        if (node->continue_target()->is_linked()) {
          node->continue_target()->Bind();
        }
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here and thus an invalid fall-through).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // If we have chosen not to recompile the test at the bottom,
        // jump back to the one at the top.
        if (has_valid_frame()) {
          node->continue_target()->Jump();
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or there
  // may not be a valid frame.  Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::SetTypeForStackSlot(Slot* slot, TypeInfo info) {
  ASSERT(slot->type() == Slot::LOCAL || slot->type() == Slot::PARAMETER);
  if (slot->type() == Slot::LOCAL) {
    frame_->SetTypeForLocalAt(slot->index(), info);
  } else {
    frame_->SetTypeForParamAt(slot->index(), info);
  }
  if (FLAG_debug_code && info.IsSmi()) {
    if (slot->type() == Slot::LOCAL) {
      frame_->PushLocalAt(slot->index());
    } else {
      frame_->PushParameterAt(slot->index());
    }
    Result var = frame_->Pop();
    var.ToRegister();
    __ AbortIfNotSmi(var.reg());
  }
}


void CodeGenerator::VisitForStatement(ForStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);

  // Compile the init expression if present.
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the condition is always false and has no side effects, we do not
  // need to compile anything else.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  // Do not duplicate conditions that may have function literal
  // subexpressions.  This can cause us to compile the function literal
  // twice.
  bool test_at_bottom = !node->may_have_function_literal();
  node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
  IncrementLoopNesting();

  // Target for backward edge if no test at the bottom, otherwise
  // unused.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);

  // Target for backward edge if there is a test at the bottom,
  // otherwise used as target for test at the top.
  JumpTarget body;
  if (test_at_bottom) {
    body.set_direction(JumpTarget::BIDIRECTIONAL);
  }

  // Based on the condition analysis, compile the test as necessary.
  switch (info) {
    case ALWAYS_TRUE:
      // We will not compile the test expression.  Label the top of the
      // loop.
      if (node->next() == NULL) {
        // Use the continue target if there is no update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // Otherwise use the backward loop target.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }
      break;
    case DONT_KNOW: {
      if (test_at_bottom) {
        // Continue is either the update expression or the test at the
        // bottom, no need to label the test at the top.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
      } else if (node->next() == NULL) {
        // We are not recompiling the test at the bottom and there is no
        // update expression.
        node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
        node->continue_target()->Bind();
      } else {
        // We are not recompiling the test at the bottom and there is an
        // update expression.
        node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
        loop.Bind();
      }
      // Compile the test with the body as the true target and preferred
      // fall-through and with the break target as the false target.
      ControlDestination dest(&body, node->break_target(), true);
      LoadCondition(node->cond(), &dest, true);

      if (dest.false_was_fall_through()) {
        // If we got the break target as fall-through, the test may have
        // been unconditionally false (if there are no jumps to the
        // body).
        if (!body.is_linked()) {
          DecrementLoopNesting();
          return;
        }

        // Otherwise, jump around the body on the fall through and then
        // bind the body target.
        node->break_target()->Unuse();
        node->break_target()->Jump();
        body.Bind();
      }
      break;
    }
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  CheckStack();  // TODO(1222600): ignore if body contains calls.

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition.  In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
  if (node->is_fast_smi_loop()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
  }
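  // (Illustrative JavaScript case, not from the original comments:
  //    for (var i = 0; i < 100; i++) { sum += i; }
  //  here i is written only by the loop update and is compared against a
  //  smi constant, so its type can safely be pinned to smi at this point.)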

  Visit(node->body());

  // If there is an update expression, compile it if necessary.
  if (node->next() != NULL) {
    if (node->continue_target()->is_linked()) {
      node->continue_target()->Bind();
    }

    // Control can reach the update by falling out of the body or by a
    // continue.
    if (has_valid_frame()) {
      // Record the source position of the statement as this code which
      // is after the code for the body actually belongs to the loop
      // statement and not the body.
      CodeForStatementPosition(node);
      Visit(node->next());
    }
  }

  // Set the type of the loop variable to smi before compiling the test
  // expression if we are in a fast smi loop condition.
  if (node->is_fast_smi_loop() && has_valid_frame()) {
    // Set number type of the loop variable to smi.
    SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
  }

  // Based on the condition analysis, compile the backward jump as
  // necessary.
  switch (info) {
    case ALWAYS_TRUE:
      if (has_valid_frame()) {
        if (node->next() == NULL) {
          node->continue_target()->Jump();
        } else {
          loop.Jump();
        }
      }
      break;
    case DONT_KNOW:
      if (test_at_bottom) {
        if (node->continue_target()->is_linked()) {
          // We can have dangling jumps to the continue target if there
          // was no update expression.
          node->continue_target()->Bind();
        }
        // Control can reach the test at the bottom by falling out of
        // the body, by a continue in the body, or from the update
        // expression.
        if (has_valid_frame()) {
          // The break target is the fall-through (body is a backward
          // jump from here).
          ControlDestination dest(&body, node->break_target(), false);
          LoadCondition(node->cond(), &dest, true);
        }
      } else {
        // Otherwise, jump back to the test at the top.
        if (has_valid_frame()) {
          if (node->next() == NULL) {
            node->continue_target()->Jump();
          } else {
            loop.Jump();
          }
        }
      }
      break;
    case ALWAYS_FALSE:
      UNREACHABLE();
      break;
  }

  // The break target may be already bound (by the condition), or
  // there may not be a valid frame.  Bind it only if needed.
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
}


void CodeGenerator::VisitForInStatement(ForInStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ ForInStatement");
  CodeForStatementPosition(node);

  JumpTarget primitive;
  JumpTarget jsobject;
  JumpTarget fixed_array;
  JumpTarget entry(JumpTarget::BIDIRECTIONAL);
  JumpTarget end_del_check;
  JumpTarget exit;

  // Get the object to enumerate over (converted to JSObject).
  LoadAndSpill(node->enumerable());

  // Both SpiderMonkey and kjs ignore null and undefined in contrast
  // to the specification; section 12.6.4 mandates a call to ToObject.
  frame_->EmitPop(eax);

  // eax: value to be iterated over
  __ cmp(eax, Factory::undefined_value());
  exit.Branch(equal);
  __ cmp(eax, Factory::null_value());
  exit.Branch(equal);
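  // (Illustrative consequence: "for (var p in null) {}" and
  // "for (var p in undefined) {}" simply run zero iterations here instead
  // of throwing, matching the engines mentioned above.)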

  // Stack layout in body:
  // [iteration counter (smi)] <- slot 0
  // [length of array] <- slot 1
  // [FixedArray] <- slot 2
  // [Map or 0] <- slot 3
  // [Object] <- slot 4

  // Check if enumerable is already a JSObject.
  // eax: value to be iterated over
  __ test(eax, Immediate(kSmiTagMask));
  primitive.Branch(zero);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
  jsobject.Branch(above_equal);

  primitive.Bind();
  frame_->EmitPush(eax);
  frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
  // function call returns the value in eax, which is where we want it below

  jsobject.Bind();
  // Get the set of properties (as a FixedArray or Map).
  // eax: value to be iterated over
  frame_->EmitPush(eax);  // Push the object being iterated over.

  // Check cache validity in generated code.  This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks.  If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  JumpTarget call_runtime;
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  JumpTarget check_prototype;
  JumpTarget use_cache;
  __ mov(ecx, eax);
  loop.Bind();
  // Check that there are no elements.
  __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  // Check that instance descriptors are not empty so that we can
  // check for an enum cache.  Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
  call_runtime.Branch(equal);
  // Check that there is an enum cache in the non-empty instance
  // descriptors.  This is the case if the next enumeration index
  // field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  call_runtime.Branch(zero);
  // For all objects but the receiver, check that the cache is empty.
  __ cmp(ecx, Operand(eax));
  check_prototype.Branch(equal);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
  call_runtime.Branch(not_equal);
  check_prototype.Bind();
  // Load the prototype from the map and loop if non-null.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  loop.Branch(not_equal);
  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  use_cache.Jump();
4169
4170 call_runtime.Bind();
4171 // Call the runtime to get the property names for the object.
Steve Blocka7e24c12009-10-30 11:49:00 +00004172 frame_->EmitPush(eax); // push the Object (slot 4) for the runtime call
4173 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
4174
Steve Blockd0582a62009-12-15 09:54:21 +00004175 // If we got a map from the runtime call, we can do a fast
4176 // modification check. Otherwise, we got a fixed array, and we have
4177 // to do a slow check.
Steve Blocka7e24c12009-10-30 11:49:00 +00004178 // eax: map or fixed array (result from call to
4179 // Runtime::kGetPropertyNamesFast)
4180 __ mov(edx, Operand(eax));
4181 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
4182 __ cmp(ecx, Factory::meta_map());
4183 fixed_array.Branch(not_equal);
4184
Steve Blockd0582a62009-12-15 09:54:21 +00004185 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00004186 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00004187 // eax: map (either the result from a call to
4188 // Runtime::kGetPropertyNamesFast or has been fetched directly from
4189 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00004190 __ mov(ecx, Operand(eax));
Steve Blockd0582a62009-12-15 09:54:21 +00004191
Steve Blocka7e24c12009-10-30 11:49:00 +00004192 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
4193 // Get the bridge array held in the enumeration index field.
4194 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
4195 // Get the cache from the bridge array.
4196 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
4197
4198 frame_->EmitPush(eax); // <- slot 3
4199 frame_->EmitPush(edx); // <- slot 2
4200 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00004201 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00004202 frame_->EmitPush(eax); // <- slot 1
4203 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4204 entry.Jump();
4205
4206 fixed_array.Bind();
4207 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
4208 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3
4209 frame_->EmitPush(eax); // <- slot 2
4210
4211 // Push the length of the array and the initial index onto the stack.
4212 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00004213 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00004214 frame_->EmitPush(eax); // <- slot 1
4215 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
4216
4217 // Condition.
4218 entry.Bind();
4219 // Grab the current frame's height for the break and continue
4220 // targets only after all the state is pushed on the frame.
4221 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
4222 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
4223
4224 __ mov(eax, frame_->ElementAt(0)); // load the current count
4225 __ cmp(eax, frame_->ElementAt(1)); // compare to the array length
4226 node->break_target()->Branch(above_equal);
4227
4228 // Get the i'th entry of the array.
4229 __ mov(edx, frame_->ElementAt(2));
Kristian Monsen25f61362010-05-21 11:50:48 +01004230 __ mov(ebx, FixedArrayElementOperand(edx, eax));
Steve Blocka7e24c12009-10-30 11:49:00 +00004231
4232 // Get the expected map from the stack or a zero map in the
4233 // permanent slow case eax: current iteration count ebx: i'th entry
4234 // of the enum cache
4235 __ mov(edx, frame_->ElementAt(3));
4236 // Check if the expected map still matches that of the enumerable.
4237 // If not, we have to filter the key.
4238 // eax: current iteration count
4239 // ebx: i'th entry of the enum cache
4240 // edx: expected map value
4241 __ mov(ecx, frame_->ElementAt(4));
4242 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
4243 __ cmp(ecx, Operand(edx));
4244 end_del_check.Branch(equal);
4245
4246 // Convert the entry to a string (or null if it isn't a property anymore).
4247 frame_->EmitPush(frame_->ElementAt(4)); // push enumerable
4248 frame_->EmitPush(ebx); // push entry
4249 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
4250 __ mov(ebx, Operand(eax));
4251
4252 // If the property has been removed while iterating, we just skip it.
4253 __ cmp(ebx, Factory::null_value());
4254 node->continue_target()->Branch(equal);
4255
4256 end_del_check.Bind();
4257 // Store the entry in the 'each' expression and take another spin in the
4258 // loop. edx: i'th entry of the enum cache (or string there of)
4259 frame_->EmitPush(ebx);
4260 { Reference each(this, node->each());
4261 // Loading a reference may leave the frame in an unspilled state.
4262 frame_->SpillAll();
4263 if (!each.is_illegal()) {
4264 if (each.size() > 0) {
4265 frame_->EmitPush(frame_->ElementAt(each.size()));
Leon Clarked91b9f72010-01-27 17:25:45 +00004266 each.SetValue(NOT_CONST_INIT);
4267 frame_->Drop(2);
4268 } else {
4269 // If the reference was to a slot we rely on the convenient property
4270 // that it doesn't matter whether a value (eg, ebx pushed above) is
4271 // right on top of or right underneath a zero-sized reference.
4272 each.SetValue(NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00004273 frame_->Drop();
4274 }
4275 }
4276 }
4277 // Unloading a reference may leave the frame in an unspilled state.
4278 frame_->SpillAll();
4279
Steve Blocka7e24c12009-10-30 11:49:00 +00004280 // Body.
4281 CheckStack(); // TODO(1222600): ignore if body contains calls.
4282 VisitAndSpill(node->body());
4283
4284 // Next. Reestablish a spilled frame in case we are coming here via
4285 // a continue in the body.
4286 node->continue_target()->Bind();
4287 frame_->SpillAll();
4288 frame_->EmitPop(eax);
4289 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
4290 frame_->EmitPush(eax);
4291 entry.Jump();
4292
4293 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
4294 // any frame.
4295 node->break_target()->Bind();
4296 frame_->Drop(5);
4297
4298 // Exit.
4299 exit.Bind();
4300
4301 node->continue_target()->Unuse();
4302 node->break_target()->Unuse();
4303}
4304
4305
Steve Block3ce2e202009-11-05 08:53:23 +00004306void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004307 ASSERT(!in_spilled_code());
4308 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00004309 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00004310 CodeForStatementPosition(node);
4311
4312 JumpTarget try_block;
4313 JumpTarget exit;
4314
4315 try_block.Call();
4316 // --- Catch block ---
4317 frame_->EmitPush(eax);
4318
4319 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00004320 Variable* catch_var = node->catch_var()->var();
4321 ASSERT(catch_var != NULL && catch_var->slot() != NULL);
4322 StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00004323
4324 // Remove the exception from the stack.
4325 frame_->Drop();
4326
4327 VisitStatementsAndSpill(node->catch_block()->statements());
4328 if (has_valid_frame()) {
4329 exit.Jump();
4330 }
4331
4332
4333 // --- Try block ---
4334 try_block.Bind();
4335
4336 frame_->PushTryHandler(TRY_CATCH_HANDLER);
4337 int handler_height = frame_->height();
4338
4339 // Shadow the jump targets for all escapes from the try block, including
4340 // returns. During shadowing, the original target is hidden as the
4341 // ShadowTarget and operations on the original actually affect the
4342 // shadowing target.
4343 //
4344 // We should probably try to unify the escaping targets and the return
4345 // target.
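  // For example, a 'return' inside the try block links the shadow of
  // function_return_, so control first reaches the unlink code emitted
  // below (which pops the try handler) before the actual return is
  // performed.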
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  bool has_unlinks = false;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    has_unlinks = has_unlinks || shadows[i]->is_linked();
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // Make sure that there's nothing left on the stack above the
  // handler structure.
  if (FLAG_debug_code) {
    __ mov(eax, Operand::StaticVariable(handler_address));
    __ cmp(esp, Operand(eax));
    __ Assert(equal, "stack pointer should point to top handler");
  }

  // If we can fall off the end of the try block, unlink from try chain.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame. Unlink from
    // the handler list and drop the rest of this handler from the
    // frame.
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
    if (has_unlinks) {
      exit.Jump();
    }
  }

  // Generate unlink code for the (formerly) shadowing targets that
  // have been jumped to. Deallocate each shadow target.
  Result return_value;
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // Unlink from try chain; be careful not to destroy the TOS if
      // there is one.
      if (i == kReturnShadowIndex) {
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that we
      // break from (e.g., for...in) may have left stuff on the stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        if (!function_return_is_shadowed_) frame_->PrepareForReturn();
        shadows[i]->other_target()->Jump(&return_value);
      } else {
        shadows[i]->other_target()->Jump();
      }
    }
  }

  exit.Bind();
}


void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
  ASSERT(!in_spilled_code());
  VirtualFrame::SpilledScope spilled_scope;
  Comment cmnt(masm_, "[ TryFinallyStatement");
  CodeForStatementPosition(node);

  // State: Used to keep track of the reason for entering the finally
  // block. Should probably be extended to hold information for
  // break/continue from within the try block.
  enum { FALLING, THROWING, JUMPING };
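  // FALLING means the try block completed normally, THROWING means an
  // exception was thrown in it, and JUMPING + i means control left it
  // through shadow target i (a return, break, or continue). The state
  // is carried into the finally block in ecx.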

  JumpTarget try_block;
  JumpTarget finally_block;

  try_block.Call();

  frame_->EmitPush(eax);
  // In case of thrown exceptions, this is where we continue.
  __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
  finally_block.Jump();

  // --- Try block ---
  try_block.Bind();

  frame_->PushTryHandler(TRY_FINALLY_HANDLER);
  int handler_height = frame_->height();

  // Shadow the jump targets for all escapes from the try block, including
  // returns. During shadowing, the original target is hidden as the
  // ShadowTarget and operations on the original actually affect the
  // shadowing target.
  //
  // We should probably try to unify the escaping targets and the return
  // target.
  int nof_escapes = node->escaping_targets()->length();
  List<ShadowTarget*> shadows(1 + nof_escapes);

  // Add the shadow target for the function return.
  static const int kReturnShadowIndex = 0;
  shadows.Add(new ShadowTarget(&function_return_));
  bool function_return_was_shadowed = function_return_is_shadowed_;
  function_return_is_shadowed_ = true;
  ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);

  // Add the remaining shadow targets.
  for (int i = 0; i < nof_escapes; i++) {
    shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
  }

  // Generate code for the statements in the try block.
  VisitStatementsAndSpill(node->try_block()->statements());

  // Stop the introduced shadowing and count the number of required unlinks.
  // After shadowing stops, the original targets are unshadowed and the
  // ShadowTargets represent the formerly shadowing targets.
  int nof_unlinks = 0;
  for (int i = 0; i < shadows.length(); i++) {
    shadows[i]->StopShadowing();
    if (shadows[i]->is_linked()) nof_unlinks++;
  }
  function_return_is_shadowed_ = function_return_was_shadowed;

  // Get an external reference to the handler address.
  ExternalReference handler_address(Top::k_handler_address);

  // If we can fall off the end of the try block, unlink from the try
  // chain and set the state on the frame to FALLING.
  if (has_valid_frame()) {
    // The next handler address is on top of the frame.
    ASSERT(StackHandlerConstants::kNextOffset == 0);
    frame_->EmitPop(Operand::StaticVariable(handler_address));
    frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

    // Fake a top of stack value (unneeded when FALLING) and set the
    // state in ecx, then jump around the unlink blocks if any.
    frame_->EmitPush(Immediate(Factory::undefined_value()));
    __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
    if (nof_unlinks > 0) {
      finally_block.Jump();
    }
  }

  // Generate code to unlink and set the state for the (formerly)
  // shadowing targets that have been jumped to.
  for (int i = 0; i < shadows.length(); i++) {
    if (shadows[i]->is_linked()) {
      // If we have come from the shadowed return, the return value is
      // on the virtual frame. We must preserve it until it is
      // pushed.
      if (i == kReturnShadowIndex) {
        Result return_value;
        shadows[i]->Bind(&return_value);
        return_value.ToRegister(eax);
      } else {
        shadows[i]->Bind();
      }
      // Because we can be jumping here (to spilled code) from
      // unspilled code, we need to reestablish a spilled frame at
      // this block.
      frame_->SpillAll();

      // Reload sp from the top handler, because some statements that
      // we break from (e.g., for...in) may have left stuff on the
      // stack.
      __ mov(esp, Operand::StaticVariable(handler_address));
      frame_->Forget(frame_->height() - handler_height);

      // Unlink this handler and drop it from the frame.
      ASSERT(StackHandlerConstants::kNextOffset == 0);
      frame_->EmitPop(Operand::StaticVariable(handler_address));
      frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

      if (i == kReturnShadowIndex) {
        // If this target shadowed the function return, materialize
        // the return value on the stack.
        frame_->EmitPush(eax);
      } else {
        // Fake TOS for targets that shadowed breaks and continues.
        frame_->EmitPush(Immediate(Factory::undefined_value()));
      }
      __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
      if (--nof_unlinks > 0) {
        // If this is not the last unlink block, jump around the next.
        finally_block.Jump();
      }
    }
  }

  // --- Finally block ---
  finally_block.Bind();

  // Push the state on the stack.
  frame_->EmitPush(ecx);

  // We keep two elements on the stack - the (possibly faked) result
  // and the state - while evaluating the finally block.
  //
  // Generate code for the statements in the finally block.
  VisitStatementsAndSpill(node->finally_block()->statements());

  if (has_valid_frame()) {
    // Restore state and return value or faked TOS.
    frame_->EmitPop(ecx);
    frame_->EmitPop(eax);
  }

  // Generate code to jump to the right destination for all used
  // formerly shadowing targets. Deallocate each shadow target.
  for (int i = 0; i < shadows.length(); i++) {
    if (has_valid_frame() && shadows[i]->is_bound()) {
      BreakTarget* original = shadows[i]->other_target();
      __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
      if (i == kReturnShadowIndex) {
        // The return value is (already) in eax.
        Result return_value = allocator_->Allocate(eax);
        ASSERT(return_value.is_valid());
        if (function_return_is_shadowed_) {
          original->Branch(equal, &return_value);
        } else {
          // Branch around the preparation for return which may emit
          // code.
          JumpTarget skip;
          skip.Branch(not_equal);
          frame_->PrepareForReturn();
          original->Jump(&return_value);
          skip.Bind();
        }
      } else {
        original->Branch(equal);
      }
    }
  }

  if (has_valid_frame()) {
    // Check if we need to rethrow the exception.
    JumpTarget exit;
    __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
    exit.Branch(not_equal);

    // Rethrow the exception.
    frame_->EmitPush(eax);  // Undo the pop from above.
    frame_->CallRuntime(Runtime::kReThrow, 1);

    // Done.
    exit.Bind();
  }
}


void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
  ASSERT(!in_spilled_code());
  Comment cmnt(masm_, "[ DebuggerStatement");
  CodeForStatementPosition(node);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Spill everything, even constants, to the frame.
  frame_->SpillAll();

  frame_->DebugBreak();
  // Ignore the return value.
#endif
}


Result CodeGenerator::InstantiateFunction(
    Handle<SharedFunctionInfo> function_info) {
  // The inevitable call will sync frame elements to memory anyway, so
  // we do it eagerly to allow us to push the arguments directly into
  // place.
  frame()->SyncRange(0, frame()->element_count() - 1);

  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.
  if (scope()->is_function_scope() && function_info->num_literals() == 0) {
    FastNewClosureStub stub;
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallStub(&stub, 1);
  } else {
    // Call the runtime to instantiate the function based on the
    // shared function info.
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(function_info));
    return frame()->CallRuntime(Runtime::kNewClosure, 2);
  }
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
  Comment cmnt(masm_, "[ FunctionLiteral");
  ASSERT(!in_safe_int32_mode());
  // Build the function info and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(node, script(), this);
  // Check for stack-overflow exception.
  if (HasStackOverflow()) return;
  Result result = InstantiateFunction(function_info);
  frame()->Push(&result);
}


void CodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
  Result result = InstantiateFunction(node->shared_function_info());
  frame()->Push(&result);
}


void CodeGenerator::VisitConditional(Conditional* node) {
  Comment cmnt(masm_, "[ Conditional");
  ASSERT(!in_safe_int32_mode());
  JumpTarget then;
  JumpTarget else_;
  JumpTarget exit;
  ControlDestination dest(&then, &else_, true);
  LoadCondition(node->condition(), &dest, true);

  if (dest.false_was_fall_through()) {
    // The else target was bound, so we compile the else part first.
    Load(node->else_expression());

    if (then.is_linked()) {
      exit.Jump();
      then.Bind();
      Load(node->then_expression());
    }
  } else {
    // The then target was bound, so we compile the then part first.
    Load(node->then_expression());

    if (else_.is_linked()) {
      exit.Jump();
      else_.Bind();
      Load(node->else_expression());
    }
  }

  exit.Bind();
}


void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());
    JumpTarget slow;
    JumpTarget done;
    Result value;

    // Generate fast case for loading from slots that correspond to
    // local/global variables or arguments unless they are shadowed by
    // eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(slot,
                                    typeof_state,
                                    &value,
                                    &slow,
                                    &done);

    slow.Bind();
    // A runtime call is inevitable. We eagerly sync frame elements
    // to memory so that we can push the arguments directly into place
    // on top of the frame.
    frame()->SyncRange(0, frame()->element_count() - 1);
    frame()->EmitPush(esi);
    frame()->EmitPush(Immediate(slot->var()->name()));
    if (typeof_state == INSIDE_TYPEOF) {
      value =
          frame()->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    } else {
      value = frame()->CallRuntime(Runtime::kLoadContextSlot, 2);
    }

    done.Bind(&value);
    frame_->Push(&value);

  } else if (slot->var()->mode() == Variable::CONST) {
    // Const slots may contain 'the hole' value (the constant hasn't been
    // initialized yet) which needs to be converted into the 'undefined'
    // value.
    //
    // We currently spill the virtual frame because constants use the
    // potentially unsafe direct-frame access of SlotOperand.
    VirtualFrame::SpilledScope spilled_scope;
    Comment cmnt(masm_, "[ Load const");
    Label exit;
    __ mov(ecx, SlotOperand(slot, ecx));
    __ cmp(ecx, Factory::the_hole_value());
    __ j(not_equal, &exit);
    __ mov(ecx, Factory::undefined_value());
    __ bind(&exit);
    frame()->EmitPush(ecx);

  } else if (slot->type() == Slot::PARAMETER) {
    frame()->PushParameterAt(slot->index());

  } else if (slot->type() == Slot::LOCAL) {
    frame()->PushLocalAt(slot->index());

  } else {
    // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
    // here.
    //
    // The use of SlotOperand below is safe for an unspilled frame
    // because it will always be a context slot.
    ASSERT(slot->type() == Slot::CONTEXT);
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
    frame()->Push(&temp);
  }
}


void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
                                                  TypeofState state) {
  LoadFromSlot(slot, state);

  // Bail out quickly if we're not using lazy arguments allocation.
  if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;

  // ... or if the slot isn't a non-parameter arguments slot.
  if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;

  // If the loaded value is a constant, we know if the arguments
  // object has been lazily loaded yet.
  Result result = frame()->Pop();
  if (result.is_constant()) {
    if (result.handle()->IsTheHole()) {
      result = StoreArgumentsObject(false);
    }
    frame()->Push(&result);
    return;
  }
  ASSERT(result.is_register());
  // The loaded value is in a register. If it is the sentinel that
  // indicates that we haven't loaded the arguments object yet, we
  // need to do it now.
  JumpTarget exit;
  __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
  frame()->Push(&result);
  exit.Branch(not_equal);

  result = StoreArgumentsObject(false);
  frame()->SetElementAt(0, &result);
  result.Unuse();
  exit.Bind();
  return;
}


Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    JumpTarget* slow) {
  ASSERT(!in_safe_int32_mode());
  // Check that no extension objects have been created by calls to
  // eval from the current scope to the global scope.
  Register context = esi;
  Result tmp = allocator_->Allocate();
  ASSERT(tmp.is_valid());  // All non-reserved registers were available.

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      // Load next context in chain.
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(tmp.reg())) {
      __ mov(tmp.reg(), context);
    }
    __ bind(&next);
    // Terminate at the global context.
    __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
           Immediate(Factory::global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
    slow->Branch(not_equal, not_taken);
    // Load next context in chain.
    __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
    __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }
  tmp.Unuse();

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  // The register allocator prefers eax if it is free, so the code generator
  // will load the global object directly into eax, which is where the LoadIC
  // expects it.
  frame_->Spill(eax);
  LoadGlobal();
  frame_->Push(slot->var()->name());
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
                         ? RelocInfo::CODE_TARGET
                         : RelocInfo::CODE_TARGET_CONTEXT;
  Result answer = frame_->CallLoadIC(mode);
  // A test eax instruction following the call signals that the inobject
  // property case was inlined. Ensure that there is not a test eax
  // instruction here.
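  // (The inlining machinery recognizes an inlined site by looking at
  // the instruction that follows the call, so the nop below ensures
  // this call site is never mistaken for an inlined load.)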
  __ nop();
  return answer;
}


void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
                                                    TypeofState typeof_state,
                                                    Result* result,
                                                    JumpTarget* slow,
                                                    JumpTarget* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
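  // For example, in "function f(x) { eval('1'); return x; }" the eval
  // introduces no bindings, so once the extension checks below have
  // passed, x can still be read directly from its parameter slot.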
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    *result = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
    done->Jump(result);

  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      // Allocate a fresh register to use as a temp in
      // ContextSlotOperandCheckExtensions and to hold the result
      // value.
      *result = allocator()->Allocate();
      ASSERT(result->is_valid());
      __ mov(result->reg(),
             ContextSlotOperandCheckExtensions(potential_slot, *result, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ cmp(result->reg(), Factory::the_hole_value());
        done->Branch(not_equal, result);
        __ mov(result->reg(), Factory::undefined_value());
      }
      done->Jump(result);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load the arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using a keyed load.
          Result arguments = allocator()->Allocate();
          ASSERT(arguments.is_valid());
          __ mov(arguments.reg(),
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
                                                   arguments,
                                                   slow));
          frame_->Push(&arguments);
          frame_->Push(key_literal->handle());
          *result = EmitKeyedLoad();
          done->Jump(result);
        }
      }
    }
  }
}


void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
  if (slot->type() == Slot::LOOKUP) {
    ASSERT(slot->var()->is_dynamic());

    // For now, just do a runtime call. Since the call is inevitable,
    // we eagerly sync the virtual frame so we can directly push the
    // arguments into place.
    frame_->SyncRange(0, frame_->element_count() - 1);

    frame_->EmitPush(esi);
    frame_->EmitPush(Immediate(slot->var()->name()));

    Result value;
    if (init_state == CONST_INIT) {
      // Same as the case for a normal store, but ignores attribute
      // (e.g. READ_ONLY) of context slot so that we can initialize const
      // properties (introduced via eval("const foo = (some expr);")). Also,
      // uses the current function context instead of the top context.
      //
      // Note that we must declare the foo upon entry of eval(), via a
      // context slot declaration, but we cannot initialize it at the same
      // time, because the const declaration may be at the end of the eval
      // code (sigh...) and the const variable may have been used before
      // (where its value is 'undefined'). Thus, we can only do the
      // initialization when we actually encounter the expression and when
      // the expression operands are defined and valid, and thus we need the
      // split into 2 operations: declaration of the context slot followed
      // by initialization.
      value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    } else {
      value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
    }
    // Storing a variable must keep the (new) value on the expression
    // stack. This is necessary for compiling chained assignment
    // expressions.
    frame_->Push(&value);

  } else {
    ASSERT(!slot->var()->is_dynamic());

    JumpTarget exit;
    if (init_state == CONST_INIT) {
      ASSERT(slot->var()->mode() == Variable::CONST);
      // Only the first const initialization must be executed (the slot
      // still contains 'the hole' value). When the assignment is executed,
      // the code is identical to a normal store (see below).
      //
      // We spill the frame in the code below because the direct-frame
      // access of SlotOperand is potentially unsafe with an unspilled
      // frame.
      VirtualFrame::SpilledScope spilled_scope;
      Comment cmnt(masm_, "[ Init const");
      __ mov(ecx, SlotOperand(slot, ecx));
      __ cmp(ecx, Factory::the_hole_value());
      exit.Branch(not_equal);
    }

    // We must execute the store. Storing a variable must keep the (new)
    // value on the stack. This is necessary for compiling assignment
    // expressions.
    //
    // Note: We will reach here even with slot->var()->mode() ==
    // Variable::CONST because of const declarations which will initialize
    // consts to 'the hole' value and by doing so, end up calling this code.
    if (slot->type() == Slot::PARAMETER) {
      frame_->StoreToParameterAt(slot->index());
    } else if (slot->type() == Slot::LOCAL) {
      frame_->StoreToLocalAt(slot->index());
    } else {
      // The other slot types (LOOKUP and GLOBAL) cannot reach here.
      //
      // The use of SlotOperand below is safe for an unspilled frame
      // because the slot is a context slot.
      ASSERT(slot->type() == Slot::CONTEXT);
      frame_->Dup();
      Result value = frame_->Pop();
      value.ToRegister();
      Result start = allocator_->Allocate();
      ASSERT(start.is_valid());
      __ mov(SlotOperand(slot, start.reg()), value.reg());
      // RecordWrite may destroy the value registers.
      //
      // TODO(204): Avoid actually spilling when the value is not
      // needed (probably the common case).
      frame_->Spill(value.reg());
      int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
      Result temp = allocator_->Allocate();
      ASSERT(temp.is_valid());
      __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
      // The results start, value, and temp are unused by going out of
      // scope.
    }

    exit.Bind();
  }
}


void CodeGenerator::VisitSlot(Slot* slot) {
  Comment cmnt(masm_, "[ Slot");
  if (in_safe_int32_mode()) {
    if (slot->type() == Slot::LOCAL && !slot->is_arguments()) {
      frame()->UntaggedPushLocalAt(slot->index());
    } else if (slot->type() == Slot::PARAMETER) {
      frame()->UntaggedPushParameterAt(slot->index());
    } else {
      UNREACHABLE();
    }
  } else {
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
  }
}


void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
  Comment cmnt(masm_, "[ VariableProxy");
  Variable* var = node->var();
  Expression* expr = var->rewrite();
  if (expr != NULL) {
    Visit(expr);
  } else {
    ASSERT(var->is_global());
    ASSERT(!in_safe_int32_mode());
    Reference ref(this, node);
    ref.GetValue();
  }
}


void CodeGenerator::VisitLiteral(Literal* node) {
  Comment cmnt(masm_, "[ Literal");
  if (in_safe_int32_mode()) {
    frame_->PushUntaggedElement(node->handle());
  } else {
    frame_->Push(node->handle());
  }
}

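// An "unsafe" smi is one whose payload has more than kMaxSmiInlinedBits
// significant bits (see IsUnsafeSmi below). The helpers that follow
// write such values in two halves - for an illustrative bit pattern of
// 0x12345678 they first emit 0x00005678 and then or in 0x12340000 - so
// that the complete, potentially attacker-chosen constant never appears
// contiguously in the generated instruction stream.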
void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ push(Immediate(bits & 0x0000FFFF));
  __ or_(Operand(esp, 0), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ mov(Operand(ebp, offset), Immediate(bits & 0x0000FFFF));
  __ or_(Operand(ebp, offset), Immediate(bits & 0xFFFF0000));
}


void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
  ASSERT(target.is_valid());
  ASSERT(value->IsSmi());
  int bits = reinterpret_cast<int>(*value);
  __ Set(target, Immediate(bits & 0x0000FFFF));
  __ or_(target, bits & 0xFFFF0000);
}


bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
  if (!value->IsSmi()) return false;
  int int_value = Smi::cast(*value)->value();
  return !is_intn(int_value, kMaxSmiInlinedBits);
}


// Materialize the regexp literal 'node' in the literals array
// 'literals' of the function. Leave the regexp boilerplate in
// 'boilerplate'.
class DeferredRegExpLiteral: public DeferredCode {
 public:
  DeferredRegExpLiteral(Register boilerplate,
                        Register literals,
                        RegExpLiteral* node)
      : boilerplate_(boilerplate), literals_(literals), node_(node) {
    set_comment("[ DeferredRegExpLiteral");
  }

  void Generate();

 private:
  Register boilerplate_;
  Register literals_;
  RegExpLiteral* node_;
};


void DeferredRegExpLiteral::Generate() {
  // Since the entry is undefined we call the runtime system to
  // compute the literal.
  // Literal array (0).
  __ push(literals_);
  // Literal index (1).
  __ push(Immediate(Smi::FromInt(node_->literal_index())));
  // RegExp pattern (2).
  __ push(Immediate(node_->pattern()));
  // RegExp flags (3).
  __ push(Immediate(node_->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
}


void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ RegExp Literal");

  // Retrieve the literals array and check the allocated entry. Begin
  // with a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  // Load the literal at the ast saved index.
  Result boilerplate = allocator_->Allocate();
  ASSERT(boilerplate.is_valid());
  int literal_offset =
      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
  __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));

  // Check whether we need to materialize the RegExp object. If so,
  // jump to the deferred code passing the literals array.
  DeferredRegExpLiteral* deferred =
      new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
  __ cmp(boilerplate.reg(), Factory::undefined_value());
  deferred->Branch(equal);
  deferred->BindExit();
  literals.Unuse();

  // Push the boilerplate object.
  frame_->Push(&boilerplate);
}


void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ObjectLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
  // Literal array.
  frame_->Push(&literals);
  // Literal index.
  frame_->Push(Smi::FromInt(node->literal_index()));
  // Constant properties.
  frame_->Push(node->constant_properties());
  // Should the object literal have fast elements?
  frame_->Push(Smi::FromInt(node->fast_elements() ? 1 : 0));
  Result clone;
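  // Deep literals (ones containing nested object or array literals)
  // must be cloned by the general runtime path; shallow ones can use
  // the cheaper shallow-clone runtime function.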
  if (node->depth() > 1) {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }
  frame_->Push(&clone);

  for (int i = 0; i < node->properties()->length(); i++) {
    ObjectLiteral::Property* property = node->properties()->at(i);
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        break;
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
        // else fall through.
      case ObjectLiteral::Property::COMPUTED: {
        Handle<Object> key(property->key()->handle());
        if (key->IsSymbol()) {
          // Duplicate the object as the IC receiver.
          frame_->Dup();
          Load(property->value());
          Result dummy = frame_->CallStoreIC(Handle<String>::cast(key), false);
          dummy.Unuse();
          break;
        }
        // Fall through.
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::SETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(1));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      case ObjectLiteral::Property::GETTER: {
        // Duplicate the object as an argument to the runtime call.
        frame_->Dup();
        Load(property->key());
        frame_->Push(Smi::FromInt(0));
        Load(property->value());
        Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
        // Ignore the result.
        break;
      }
      default: UNREACHABLE();
    }
  }
}


void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ ArrayLiteral");

  // Load a writable copy of the function of this activation in a
  // register.
  frame_->PushFunction();
  Result literals = frame_->Pop();
  literals.ToRegister();
  frame_->Spill(literals.reg());

  // Load the literals array of the function.
  __ mov(literals.reg(),
         FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

  frame_->Push(&literals);
  frame_->Push(Smi::FromInt(node->literal_index()));
  frame_->Push(node->constant_elements());
  int length = node->values()->length();
  Result clone;
5315 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
5316 } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
5317 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
5318 } else {
5319 FastCloneShallowArrayStub stub(length);
5320 clone = frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00005321 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005322 frame_->Push(&clone);
5323
5324 // Generate code to set the elements in the array that are not
5325 // literals.
Leon Clarkee46be812010-01-19 14:06:41 +00005326 for (int i = 0; i < length; i++) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005327 Expression* value = node->values()->at(i);
5328
5329 // If value is a literal the property value is already set in the
5330 // boilerplate object.
5331 if (value->AsLiteral() != NULL) continue;
5332 // If value is a materialized literal the property value is already set
5333 // in the boilerplate object if it is simple.
5334 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
5335
5336 // The property must be set by generated code.
5337 Load(value);
5338
5339 // Get the property value off the stack.
5340 Result prop_value = frame_->Pop();
5341 prop_value.ToRegister();
5342
5343 // Fetch the array literal while leaving a copy on the stack and
5344 // use it to get the elements array.
5345 frame_->Dup();
5346 Result elements = frame_->Pop();
5347 elements.ToRegister();
5348 frame_->Spill(elements.reg());
5349 // Get the elements array.
5350 __ mov(elements.reg(),
5351 FieldOperand(elements.reg(), JSObject::kElementsOffset));
5352
5353 // Write to the indexed properties array.
5354 int offset = i * kPointerSize + FixedArray::kHeaderSize;
5355 __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());
5356
5357 // Update the write barrier for the array address.
5358 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier.
5359 Result scratch = allocator_->Allocate();
5360 ASSERT(scratch.is_valid());
5361 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
5362 }
5363}
5364
5365
5366void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005367 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005368 ASSERT(!in_spilled_code());
5369 // Call runtime routine to allocate the catch extension object and
5370 // assign the exception value to the catch variable.
5371 Comment cmnt(masm_, "[ CatchExtensionObject");
5372 Load(node->key());
5373 Load(node->value());
5374 Result result =
5375 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
5376 frame_->Push(&result);
5377}
5378
5379
Andrei Popescu402d9372010-02-26 13:31:12 +00005380void CodeGenerator::EmitSlotAssignment(Assignment* node) {
5381#ifdef DEBUG
5382 int original_height = frame()->height();
5383#endif
5384 Comment cmnt(masm(), "[ Variable Assignment");
5385 Variable* var = node->target()->AsVariableProxy()->AsVariable();
5386 ASSERT(var != NULL);
5387 Slot* slot = var->slot();
5388 ASSERT(slot != NULL);
5389
5390 // Evaluate the right-hand side.
5391 if (node->is_compound()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005392 // For a compound assignment the right-hand side is a binary operation
5393 // between the current property value and the actual right-hand side.
Leon Clarkef7060e22010-06-03 12:02:55 +01005394 LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Andrei Popescu402d9372010-02-26 13:31:12 +00005395 Load(node->value());
5396
Steve Block6ded16b2010-05-10 14:33:55 +01005397 // Perform the binary operation.
Andrei Popescu402d9372010-02-26 13:31:12 +00005398 bool overwrite_value =
5399 (node->value()->AsBinaryOperation() != NULL &&
5400 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
Steve Block6ded16b2010-05-10 14:33:55 +01005401 // Construct the implicit binary operation.
5402 BinaryOperation expr(node, node->binary_op(), node->target(),
5403 node->value());
5404 GenericBinaryOperation(&expr,
Andrei Popescu402d9372010-02-26 13:31:12 +00005405 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
5406 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01005407 // For non-compound assignment just load the right-hand side.
Andrei Popescu402d9372010-02-26 13:31:12 +00005408 Load(node->value());
5409 }
5410
5411 // Perform the assignment.
5412 if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
5413 CodeForSourcePosition(node->position());
5414 StoreToSlot(slot,
5415 node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
5416 }
5417 ASSERT(frame()->height() == original_height + 1);
5418}
5419
5420
5421void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
5422#ifdef DEBUG
5423 int original_height = frame()->height();
5424#endif
5425 Comment cmnt(masm(), "[ Named Property Assignment");
5426 Variable* var = node->target()->AsVariableProxy()->AsVariable();
5427 Property* prop = node->target()->AsProperty();
5428 ASSERT(var == NULL || (prop == NULL && var->is_global()));
5429
Steve Block6ded16b2010-05-10 14:33:55 +01005430 // Initialize name and evaluate the receiver sub-expression if necessary. If
5431 // the receiver is trivial it is not placed on the stack at this point, but
5432 // loaded whenever actually needed.
Andrei Popescu402d9372010-02-26 13:31:12 +00005433 Handle<String> name;
5434 bool is_trivial_receiver = false;
5435 if (var != NULL) {
5436 name = var->name();
5437 } else {
5438 Literal* lit = prop->key()->AsLiteral();
5439 ASSERT_NOT_NULL(lit);
5440 name = Handle<String>::cast(lit->handle());
5441 // Do not materialize the receiver on the frame if it is trivial.
5442 is_trivial_receiver = prop->obj()->IsTrivial();
5443 if (!is_trivial_receiver) Load(prop->obj());
5444 }
5445
Steve Block6ded16b2010-05-10 14:33:55 +01005446 // Change to slow case in the beginning of an initialization block to
5447 // avoid the quadratic behavior of repeatedly adding fast properties.
Andrei Popescu402d9372010-02-26 13:31:12 +00005448 if (node->starts_initialization_block()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005449 // Initialization block consists of assignments of the form expr.x = ..., so
5450 // this will never be an assignment to a variable, so there must be a
5451 // receiver object.
Andrei Popescu402d9372010-02-26 13:31:12 +00005452 ASSERT_EQ(NULL, var);
Andrei Popescu402d9372010-02-26 13:31:12 +00005453 if (is_trivial_receiver) {
5454 frame()->Push(prop->obj());
5455 } else {
5456 frame()->Dup();
5457 }
5458 Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
5459 }
5460
Steve Block6ded16b2010-05-10 14:33:55 +01005461 // Change to fast case at the end of an initialization block. To prepare for
5462 // that add an extra copy of the receiver to the frame, so that it can be
5463 // converted back to fast case after the assignment.
Andrei Popescu402d9372010-02-26 13:31:12 +00005464 if (node->ends_initialization_block() && !is_trivial_receiver) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005465 frame()->Dup();
5466 }
5467
Steve Block6ded16b2010-05-10 14:33:55 +01005468 // Stack layout:
5469 // [tos] : receiver (only materialized if non-trivial)
5470 // [tos+1] : receiver if at the end of an initialization block
5471
Andrei Popescu402d9372010-02-26 13:31:12 +00005472 // Evaluate the right-hand side.
5473 if (node->is_compound()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005474 // For a compound assignment the right-hand side is a binary operation
5475 // between the current property value and the actual right-hand side.
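    // E.g. for 'obj.x += y' the current value of obj.x is loaded first,
    // then y; the addition below produces the value that is then stored
    // back into obj.x (illustrative example).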
Andrei Popescu402d9372010-02-26 13:31:12 +00005476 if (is_trivial_receiver) {
5477 frame()->Push(prop->obj());
5478 } else if (var != NULL) {
5479 // The LoadIC stub expects the object in eax.
5480 // Freeing eax causes the code generator to load the global into it.
5481 frame_->Spill(eax);
5482 LoadGlobal();
5483 } else {
5484 frame()->Dup();
5485 }
5486 Result value = EmitNamedLoad(name, var != NULL);
5487 frame()->Push(&value);
5488 Load(node->value());
5489
5490 bool overwrite_value =
5491 (node->value()->AsBinaryOperation() != NULL &&
5492 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
Steve Block6ded16b2010-05-10 14:33:55 +01005493 // Construct the implicit binary operation.
5494 BinaryOperation expr(node, node->binary_op(), node->target(),
5495 node->value());
5496 GenericBinaryOperation(&expr,
Andrei Popescu402d9372010-02-26 13:31:12 +00005497 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
5498 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01005499 // For non-compound assignment just load the right-hand side.
Andrei Popescu402d9372010-02-26 13:31:12 +00005500 Load(node->value());
5501 }
5502
Steve Block6ded16b2010-05-10 14:33:55 +01005503 // Stack layout:
5504 // [tos] : value
5505 // [tos+1] : receiver (only materialized if non-trivial)
5506 // [tos+2] : receiver if at the end of an initialization block
5507
Andrei Popescu402d9372010-02-26 13:31:12 +00005508 // Perform the assignment. It is safe to ignore constants here.
5509 ASSERT(var == NULL || var->mode() != Variable::CONST);
5510 ASSERT_NE(Token::INIT_CONST, node->op());
5511 if (is_trivial_receiver) {
5512 Result value = frame()->Pop();
5513 frame()->Push(prop->obj());
5514 frame()->Push(&value);
5515 }
5516 CodeForSourcePosition(node->position());
5517 bool is_contextual = (var != NULL);
5518 Result answer = EmitNamedStore(name, is_contextual);
5519 frame()->Push(&answer);
5520
Steve Block6ded16b2010-05-10 14:33:55 +01005521 // Stack layout:
5522 // [tos] : result
5523 // [tos+1] : receiver if at the end of an initialization block
5524
Andrei Popescu402d9372010-02-26 13:31:12 +00005525 if (node->ends_initialization_block()) {
5526 ASSERT_EQ(NULL, var);
5527 // The argument to the runtime call is the receiver.
5528 if (is_trivial_receiver) {
5529 frame()->Push(prop->obj());
5530 } else {
5531 // A copy of the receiver is below the value of the assignment. Swap
5532 // the receiver and the value of the assignment expression.
5533 Result result = frame()->Pop();
5534 Result receiver = frame()->Pop();
5535 frame()->Push(&result);
5536 frame()->Push(&receiver);
5537 }
5538 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
5539 }
5540
Steve Block6ded16b2010-05-10 14:33:55 +01005541 // Stack layout:
5542 // [tos] : result
5543
Andrei Popescu402d9372010-02-26 13:31:12 +00005544 ASSERT_EQ(frame()->height(), original_height + 1);
5545}
5546
5547
5548void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
5549#ifdef DEBUG
5550 int original_height = frame()->height();
5551#endif
Steve Block6ded16b2010-05-10 14:33:55 +01005552 Comment cmnt(masm_, "[ Keyed Property Assignment");
Andrei Popescu402d9372010-02-26 13:31:12 +00005553 Property* prop = node->target()->AsProperty();
5554 ASSERT_NOT_NULL(prop);
5555
5556 // Evaluate the receiver subexpression.
5557 Load(prop->obj());
5558
Steve Block6ded16b2010-05-10 14:33:55 +01005559 // Change to slow case at the beginning of an initialization block to
5560 // avoid the quadratic behavior of repeatedly adding fast properties.
Andrei Popescu402d9372010-02-26 13:31:12 +00005561 if (node->starts_initialization_block()) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005562 frame_->Dup();
5563 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
5564 }
5565
Steve Block6ded16b2010-05-10 14:33:55 +01005566 // Change to fast case at the end of an initialization block. To prepare for
5567 // that, add an extra copy of the receiver to the frame so that it can be
5568 // converted back to fast case after the assignment.
Andrei Popescu402d9372010-02-26 13:31:12 +00005569 if (node->ends_initialization_block()) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005570 frame_->Dup();
5571 }
5572
5573 // Evaluate the key subexpression.
5574 Load(prop->key());
5575
Steve Block6ded16b2010-05-10 14:33:55 +01005576 // Stack layout:
5577 // [tos] : key
5578 // [tos+1] : receiver
5579 // [tos+2] : receiver if at the end of an initialization block
5580
Andrei Popescu402d9372010-02-26 13:31:12 +00005581 // Evaluate the right-hand side.
5582 if (node->is_compound()) {
Steve Block6ded16b2010-05-10 14:33:55 +01005583 // For a compound assignment the right-hand side is a binary operation
5584 // between the current property value and the actual right-hand side.
5585 // Duplicate receiver and key for loading the current property value.
Andrei Popescu402d9372010-02-26 13:31:12 +00005586 frame()->PushElementAt(1);
5587 frame()->PushElementAt(1);
5588 Result value = EmitKeyedLoad();
5589 frame()->Push(&value);
5590 Load(node->value());
5591
Steve Block6ded16b2010-05-10 14:33:55 +01005592 // Perform the binary operation.
Andrei Popescu402d9372010-02-26 13:31:12 +00005593 bool overwrite_value =
5594 (node->value()->AsBinaryOperation() != NULL &&
5595 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
Steve Block6ded16b2010-05-10 14:33:55 +01005596 BinaryOperation expr(node, node->binary_op(), node->target(),
5597 node->value());
5598 GenericBinaryOperation(&expr,
Andrei Popescu402d9372010-02-26 13:31:12 +00005599 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
5600 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01005601 // For non-compound assignment just load the right-hand side.
Andrei Popescu402d9372010-02-26 13:31:12 +00005602 Load(node->value());
5603 }
5604
Steve Block6ded16b2010-05-10 14:33:55 +01005605 // Stack layout:
5606 // [tos] : value
5607 // [tos+1] : key
5608 // [tos+2] : receiver
5609 // [tos+3] : receiver if at the end of an initialization block
5610
Andrei Popescu402d9372010-02-26 13:31:12 +00005611 // Perform the assignment. It is safe to ignore constants here.
5612 ASSERT(node->op() != Token::INIT_CONST);
5613 CodeForSourcePosition(node->position());
5614 Result answer = EmitKeyedStore(prop->key()->type());
5615 frame()->Push(&answer);
5616
Steve Block6ded16b2010-05-10 14:33:55 +01005617 // Stack layout:
5618 // [tos] : result
5619 // [tos+1] : receiver if at the end of an initialization block
5620
5621 // Change to fast case at the end of an initialization block.
Andrei Popescu402d9372010-02-26 13:31:12 +00005622 if (node->ends_initialization_block()) {
5623 // The argument to the runtime call is the extra copy of the receiver,
5624 // which is below the value of the assignment. Swap the receiver and
5625 // the value of the assignment expression.
5626 Result result = frame()->Pop();
5627 Result receiver = frame()->Pop();
5628 frame()->Push(&result);
5629 frame()->Push(&receiver);
5630 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
5631 }
5632
Steve Block6ded16b2010-05-10 14:33:55 +01005633 // Stack layout:
5634 // [tos] : result
5635
Andrei Popescu402d9372010-02-26 13:31:12 +00005636 ASSERT(frame()->height() == original_height + 1);
5637}
5638
5639
Steve Blocka7e24c12009-10-30 11:49:00 +00005640void CodeGenerator::VisitAssignment(Assignment* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005641 ASSERT(!in_safe_int32_mode());
Leon Clarked91b9f72010-01-27 17:25:45 +00005642#ifdef DEBUG
Andrei Popescu402d9372010-02-26 13:31:12 +00005643 int original_height = frame()->height();
Leon Clarked91b9f72010-01-27 17:25:45 +00005644#endif
Andrei Popescu402d9372010-02-26 13:31:12 +00005645 Variable* var = node->target()->AsVariableProxy()->AsVariable();
5646 Property* prop = node->target()->AsProperty();
Steve Blocka7e24c12009-10-30 11:49:00 +00005647
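  // Dispatch on the shape of the target (schematically):
  //   'x = v' for a local or parameter slot -> EmitSlotAssignment,
  //   'o.name = v' or a global 'x = v' -> EmitNamedPropertyAssignment,
  //   'o[expr] = v' -> EmitKeyedPropertyAssignment,
  // and anything else is an invalid left-hand side.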
Andrei Popescu402d9372010-02-26 13:31:12 +00005648 if (var != NULL && !var->is_global()) {
5649 EmitSlotAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00005650
Andrei Popescu402d9372010-02-26 13:31:12 +00005651 } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
5652 (var != NULL && var->is_global())) {
5653 // Properties whose keys are property names and global variables are
5654 // treated as named property references. We do not need to consider
5655 // global 'this' because it is not a valid left-hand side.
5656 EmitNamedPropertyAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00005657
Andrei Popescu402d9372010-02-26 13:31:12 +00005658 } else if (prop != NULL) {
5659 // Other properties (including rewritten parameters for a function that
5660 // uses arguments) are keyed property assignments.
5661 EmitKeyedPropertyAssignment(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00005662
Andrei Popescu402d9372010-02-26 13:31:12 +00005663 } else {
5664 // Invalid left-hand side.
5665 Load(node->target());
5666 Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
5667 // The runtime call doesn't actually return, but the code generator
5668 // still emits code after it and expects a certain frame height.
5669 frame()->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00005670 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005671
5672 ASSERT(frame()->height() == original_height + 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00005673}
5674
5675
5676void CodeGenerator::VisitThrow(Throw* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005677 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005678 Comment cmnt(masm_, "[ Throw");
5679 Load(node->exception());
5680 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
5681 frame_->Push(&result);
5682}
5683
5684
5685void CodeGenerator::VisitProperty(Property* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005686 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005687 Comment cmnt(masm_, "[ Property");
5688 Reference property(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00005689 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005690}
5691
5692
5693void CodeGenerator::VisitCall(Call* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005694 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005695 Comment cmnt(masm_, "[ Call");
5696
5697 Expression* function = node->expression();
5698 ZoneList<Expression*>* args = node->arguments();
5699
5700 // Check if the function is a variable or a property.
5701 Variable* var = function->AsVariableProxy()->AsVariable();
5702 Property* property = function->AsProperty();
5703
5704 // ------------------------------------------------------------------------
5705 // Fast-case: Use inline caching.
5706 // ---
5707 // According to ECMA-262, section 11.2.3, page 44, the function to call
5708 // must be resolved after the arguments have been evaluated. The IC code
5709 // automatically handles this by loading the arguments before the function
5710 // is resolved in cache misses (this also holds for megamorphic calls).
5711 // ------------------------------------------------------------------------
5712
5713 if (var != NULL && var->is_possibly_eval()) {
5714 // ----------------------------------
5715 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
5716 // ----------------------------------
5717
5718 // In a call to eval, we first call %ResolvePossiblyDirectEval to
5719 // resolve the function we need to call and the receiver of the
5720 // call. Then we call the resolved function using the given
5721 // arguments.
5722
5723 // Prepare the stack for the call to the resolved function.
5724 Load(function);
5725
5726 // Allocate a frame slot for the receiver.
5727 frame_->Push(Factory::undefined_value());
5728 int arg_count = args->length();
5729 for (int i = 0; i < arg_count; i++) {
5730 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01005731 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00005732 }
5733
5734 // Prepare the stack for the call to ResolvePossiblyDirectEval.
5735 frame_->PushElementAt(arg_count + 1);
5736 if (arg_count > 0) {
5737 frame_->PushElementAt(arg_count);
5738 } else {
5739 frame_->Push(Factory::undefined_value());
5740 }
5741
Leon Clarkee46be812010-01-19 14:06:41 +00005742 // Push the receiver.
5743 frame_->PushParameterAt(-1);
5744
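  // At this point the three resolver arguments are on top of the frame
  // (schematically): the function being called, its first argument, and
  // the current receiver.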
Steve Blocka7e24c12009-10-30 11:49:00 +00005745 // Resolve the call.
5746 Result result =
Leon Clarkee46be812010-01-19 14:06:41 +00005747 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00005748
Leon Clarkee46be812010-01-19 14:06:41 +00005749 // The runtime call returns a pair of values in eax (function) and
5750 // edx (receiver). Touch up the stack with the right values.
5751 Result receiver = allocator_->Allocate(edx);
5752 frame_->SetElementAt(arg_count + 1, &result);
5753 frame_->SetElementAt(arg_count, &receiver);
5754 receiver.Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00005755
5756 // Call the function.
5757 CodeForSourcePosition(node->position());
5758 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00005759 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00005760 result = frame_->CallStub(&call_function, arg_count + 1);
5761
5762 // Restore the context and overwrite the function on the stack with
5763 // the result.
5764 frame_->RestoreContextRegister();
5765 frame_->SetElementAt(0, &result);
5766
5767 } else if (var != NULL && !var->is_this() && var->is_global()) {
5768 // ----------------------------------
5769 // JavaScript example: 'foo(1, 2, 3)' // foo is global
5770 // ----------------------------------
5771
Steve Blocka7e24c12009-10-30 11:49:00 +00005772 // Pass the global object as the receiver and let the IC stub
5773 // patch the stack to use the global proxy as 'this' in the
5774 // invoked function.
5775 LoadGlobal();
5776
5777 // Load the arguments.
5778 int arg_count = args->length();
5779 for (int i = 0; i < arg_count; i++) {
5780 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01005781 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00005782 }
5783
Leon Clarkee46be812010-01-19 14:06:41 +00005784 // Push the name of the function onto the frame.
5785 frame_->Push(var->name());
5786
Steve Blocka7e24c12009-10-30 11:49:00 +00005787 // Call the IC initialization code.
5788 CodeForSourcePosition(node->position());
5789 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
5790 arg_count,
5791 loop_nesting());
5792 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00005793 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00005794
5795 } else if (var != NULL && var->slot() != NULL &&
5796 var->slot()->type() == Slot::LOOKUP) {
5797 // ----------------------------------
Kristian Monsen25f61362010-05-21 11:50:48 +01005798 // JavaScript examples:
5799 //
5800 // with (obj) foo(1, 2, 3) // foo may be in obj.
5801 //
5802 // function f() {};
5803 // function g() {
5804 // eval(...);
5805 // f(); // f could be in extension object.
5806 // }
Steve Blocka7e24c12009-10-30 11:49:00 +00005807 // ----------------------------------
5808
Kristian Monsen25f61362010-05-21 11:50:48 +01005809 JumpTarget slow, done;
5810 Result function;
5811
5812 // Generate fast case for loading functions from slots that
5813 // correspond to local/global variables or arguments unless they
5814 // are shadowed by eval-introduced bindings.
5815 EmitDynamicLoadFromSlotFastCase(var->slot(),
5816 NOT_INSIDE_TYPEOF,
5817 &function,
5818 &slow,
5819 &done);
5820
5821 slow.Bind();
5822 // Enter the runtime system to load the function from the context.
5823 // Sync the frame so we can push the arguments directly into
5824 // place.
Steve Blocka7e24c12009-10-30 11:49:00 +00005825 frame_->SyncRange(0, frame_->element_count() - 1);
5826 frame_->EmitPush(esi);
5827 frame_->EmitPush(Immediate(var->name()));
5828 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
5829 // The runtime call returns a pair of values in eax and edx. The
5830 // looked-up function is in eax and the receiver is in edx. These
5831 // register references are not ref counted here. We spill them
5832 // eagerly since they are arguments to an inevitable call (and are
5833 // not sharable by the arguments).
5834 ASSERT(!allocator()->is_used(eax));
5835 frame_->EmitPush(eax);
5836
5837 // Load the receiver.
5838 ASSERT(!allocator()->is_used(edx));
5839 frame_->EmitPush(edx);
5840
Kristian Monsen25f61362010-05-21 11:50:48 +01005841 // If fast case code has been generated, emit code to push the
5842 // function and receiver and have the slow path jump around this
5843 // code.
5844 if (done.is_linked()) {
5845 JumpTarget call;
5846 call.Jump();
5847 done.Bind(&function);
5848 frame_->Push(&function);
5849 LoadGlobalReceiver();
5850 call.Bind();
5851 }
5852
Steve Blocka7e24c12009-10-30 11:49:00 +00005853 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00005854 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00005855
5856 } else if (property != NULL) {
5857 // Check if the key is a literal string.
5858 Literal* literal = property->key()->AsLiteral();
5859
5860 if (literal != NULL && literal->handle()->IsSymbol()) {
5861 // ------------------------------------------------------------------
5862 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
5863 // ------------------------------------------------------------------
5864
5865 Handle<String> name = Handle<String>::cast(literal->handle());
5866
5867 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
5868 name->IsEqualTo(CStrVector("apply")) &&
5869 args->length() == 2 &&
5870 args->at(1)->AsVariableProxy() != NULL &&
5871 args->at(1)->AsVariableProxy()->IsArguments()) {
5872 // Use the optimized Function.prototype.apply that avoids
5873 // allocating lazily allocated arguments objects.
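        // (This matches calls of the form 'fn.apply(receiver, arguments)'
        // made while the arguments object has not been materialized;
        // illustrative description of the guard above.)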
Leon Clarked91b9f72010-01-27 17:25:45 +00005874 CallApplyLazy(property->obj(),
Steve Blocka7e24c12009-10-30 11:49:00 +00005875 args->at(0),
5876 args->at(1)->AsVariableProxy(),
5877 node->position());
5878
5879 } else {
Leon Clarkee46be812010-01-19 14:06:41 +00005880 // Push the receiver onto the frame.
Steve Blocka7e24c12009-10-30 11:49:00 +00005881 Load(property->obj());
5882
5883 // Load the arguments.
5884 int arg_count = args->length();
5885 for (int i = 0; i < arg_count; i++) {
5886 Load(args->at(i));
Leon Clarkef7060e22010-06-03 12:02:55 +01005887 frame_->SpillTop();
Steve Blocka7e24c12009-10-30 11:49:00 +00005888 }
5889
Leon Clarkee46be812010-01-19 14:06:41 +00005890 // Push the name of the function onto the frame.
5891 frame_->Push(name);
5892
Steve Blocka7e24c12009-10-30 11:49:00 +00005893 // Call the IC initialization code.
5894 CodeForSourcePosition(node->position());
5895 Result result =
5896 frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
5897 loop_nesting());
5898 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00005899 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00005900 }
5901
5902 } else {
5903 // -------------------------------------------
5904 // JavaScript example: 'array[index](1, 2, 3)'
5905 // -------------------------------------------
5906
5907 // Load the function to call from the property through a reference.
Steve Blocka7e24c12009-10-30 11:49:00 +00005908
5909 // Pass receiver to called function.
5910 if (property->is_synthetic()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00005911 Reference ref(this, property);
5912 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005913 // Use global object as receiver.
5914 LoadGlobalReceiver();
5915 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00005916 Load(property->obj());
Andrei Popescu402d9372010-02-26 13:31:12 +00005917 frame()->Dup();
Leon Clarked91b9f72010-01-27 17:25:45 +00005918 Load(property->key());
Andrei Popescu402d9372010-02-26 13:31:12 +00005919 Result function = EmitKeyedLoad();
Leon Clarked91b9f72010-01-27 17:25:45 +00005920 Result receiver = frame_->Pop();
5921 frame_->Push(&function);
5922 frame_->Push(&receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00005923 }
5924
5925 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00005926 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00005927 }
5928
5929 } else {
5930 // ----------------------------------
5931 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
5932 // ----------------------------------
5933
5934 // Load the function.
5935 Load(function);
5936
5937 // Pass the global proxy as the receiver.
5938 LoadGlobalReceiver();
5939
5940 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00005941 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00005942 }
5943}
5944
5945
5946void CodeGenerator::VisitCallNew(CallNew* node) {
Steve Block6ded16b2010-05-10 14:33:55 +01005947 ASSERT(!in_safe_int32_mode());
Steve Blocka7e24c12009-10-30 11:49:00 +00005948 Comment cmnt(masm_, "[ CallNew");
5949
5950 // According to ECMA-262, section 11.2.2, page 44, the function
5951 // expression in new calls must be evaluated before the
5952 // arguments. This is different from ordinary calls, where the
5953 // actual function to call is resolved after the arguments have been
5954 // evaluated.
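  // E.g. in 'new (getConstructor())(f(), g())' the call to
  // getConstructor() happens before f() and g() are evaluated
  // (illustrative example).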
5955
5956 // Compute function to call and use the global object as the
5957 // receiver. There is no need to use the global proxy here because
5958 // it will always be replaced with a newly allocated object.
5959 Load(node->expression());
5960 LoadGlobal();
5961
5962 // Push the arguments ("left-to-right") on the stack.
5963 ZoneList<Expression*>* args = node->arguments();
5964 int arg_count = args->length();
5965 for (int i = 0; i < arg_count; i++) {
5966 Load(args->at(i));
5967 }
5968
5969 // Call the construct call builtin that handles allocation and
5970 // constructor invocation.
5971 CodeForSourcePosition(node->position());
5972 Result result = frame_->CallConstructor(arg_count);
5973 // Replace the function on the stack with the result.
5974 frame_->SetElementAt(0, &result);
5975}
5976
5977
5978void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
5979 ASSERT(args->length() == 1);
5980 Load(args->at(0));
5981 Result value = frame_->Pop();
5982 value.ToRegister();
5983 ASSERT(value.is_valid());
5984 __ test(value.reg(), Immediate(kSmiTagMask));
5985 value.Unuse();
5986 destination()->Split(zero);
5987}
5988
5989
5990void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
5991 // Conditionally generate a log call.
5992 // Args:
5993 // 0 (literal string): The type of logging (corresponds to the flags).
5994 // This is used to determine whether or not to generate the log call.
5995 // 1 (string): Format string. Access the string at argument index 2
5996 // with '%2s' (see Logger::LogRuntime for all the formats).
5997 // 2 (array): Arguments to the format string.
5998 ASSERT_EQ(args->length(), 3);
5999#ifdef ENABLE_LOGGING_AND_PROFILING
6000 if (ShouldGenerateLog(args->at(0))) {
6001 Load(args->at(1));
6002 Load(args->at(2));
6003 frame_->CallRuntime(Runtime::kLog, 2);
6004 }
6005#endif
6006 // Finally, we're expected to leave a value on the top of the stack.
6007 frame_->Push(Factory::undefined_value());
6008}
6009
6010
6011void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
6012 ASSERT(args->length() == 1);
6013 Load(args->at(0));
6014 Result value = frame_->Pop();
6015 value.ToRegister();
6016 ASSERT(value.is_valid());
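  // A non-negative smi has a zero tag bit and a zero sign bit, so
  // testing against (kSmiTagMask | kSmiSignMask) sets the zero flag
  // exactly for non-negative smis.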
Steve Block6ded16b2010-05-10 14:33:55 +01006017 __ test(value.reg(), Immediate(kSmiTagMask | kSmiSignMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006018 value.Unuse();
6019 destination()->Split(zero);
6020}
6021
6022
6023// This generates code that performs a charCodeAt() call or returns
6024// undefined in order to trigger the slow case, Runtime_StringCharCodeAt.
Steve Blockd0582a62009-12-15 09:54:21 +00006025// It can handle flat strings with 8 and 16 bit characters, and cons
6026 // strings where the answer is found in the left-hand branch of the cons.
6027 // The slow case flattens the string, which ensures that the answer is in
6028 // the left-hand side the next time around.
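// (E.g. the result of 'foo' + bar is stored as the cons pair
// ('foo', bar), so charCodeAt(1) finds its answer in the left branch.
// Illustrative example.)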
Steve Blocka7e24c12009-10-30 11:49:00 +00006029void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
6030 Comment(masm_, "[ GenerateFastCharCodeAt");
6031 ASSERT(args->length() == 2);
6032
Steve Blocka7e24c12009-10-30 11:49:00 +00006033 Load(args->at(0));
6034 Load(args->at(1));
6035 Result index = frame_->Pop();
6036 Result object = frame_->Pop();
6037
Steve Blocka7e24c12009-10-30 11:49:00 +00006038 // We will mutate the index register and possibly the object register.
6039 // The case where they are somehow the same register is handled
6040 // because we only mutate them in the case where the receiver is a
6041 // heap object and the index is not.
6042 object.ToRegister();
6043 index.ToRegister();
6044 frame_->Spill(object.reg());
6045 frame_->Spill(index.reg());
6046
Steve Block6ded16b2010-05-10 14:33:55 +01006047 // We need two extra registers.
6048 Result result = allocator()->Allocate();
6049 ASSERT(result.is_valid());
6050 Result scratch = allocator()->Allocate();
6051 ASSERT(scratch.is_valid());
Steve Blocka7e24c12009-10-30 11:49:00 +00006052
6053 // There is no virtual frame effect from here up to the final result
6054 // push.
Steve Block6ded16b2010-05-10 14:33:55 +01006055 Label slow_case;
6056 Label exit;
6057 StringHelper::GenerateFastCharCodeAt(masm_,
6058 object.reg(),
6059 index.reg(),
6060 scratch.reg(),
6061 result.reg(),
6062 &slow_case,
6063 &slow_case,
6064 &slow_case,
6065 &slow_case);
6066 __ jmp(&exit);
Steve Blocka7e24c12009-10-30 11:49:00 +00006067
6068 __ bind(&slow_case);
6069 // Move the undefined value into the result register, which will
6070 // trigger the slow case.
Steve Block6ded16b2010-05-10 14:33:55 +01006071 __ Set(result.reg(), Immediate(Factory::undefined_value()));
Steve Blocka7e24c12009-10-30 11:49:00 +00006072
Steve Block6ded16b2010-05-10 14:33:55 +01006073 __ bind(&exit);
6074 frame_->Push(&result);
6075}
6076
6077
6078void CodeGenerator::GenerateCharFromCode(ZoneList<Expression*>* args) {
6079 Comment(masm_, "[ GenerateCharFromCode");
6080 ASSERT(args->length() == 1);
6081
6082 Load(args->at(0));
6083
6084 Result code = frame_->Pop();
6085 code.ToRegister();
6086 ASSERT(code.is_valid());
6087
6088 // StringHelper::GenerateCharFromCode may do a runtime call.
6089 frame_->SpillAll();
6090
6091 Result result = allocator()->Allocate();
6092 ASSERT(result.is_valid());
6093
6094 StringHelper::GenerateCharFromCode(masm_,
6095 code.reg(),
6096 result.reg(),
6097 CALL_FUNCTION);
6098 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00006099}
6100
6101
6102void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
6103 ASSERT(args->length() == 1);
6104 Load(args->at(0));
6105 Result value = frame_->Pop();
6106 value.ToRegister();
6107 ASSERT(value.is_valid());
6108 __ test(value.reg(), Immediate(kSmiTagMask));
6109 destination()->false_target()->Branch(equal);
6110 // It is a heap object - get map.
6111 Result temp = allocator()->Allocate();
6112 ASSERT(temp.is_valid());
6113 // Check if the object is a JS array or not.
6114 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
6115 value.Unuse();
6116 temp.Unuse();
6117 destination()->Split(equal);
6118}
6119
6120
Andrei Popescu402d9372010-02-26 13:31:12 +00006121void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
6122 ASSERT(args->length() == 1);
6123 Load(args->at(0));
6124 Result value = frame_->Pop();
6125 value.ToRegister();
6126 ASSERT(value.is_valid());
6127 __ test(value.reg(), Immediate(kSmiTagMask));
6128 destination()->false_target()->Branch(equal);
6129 // It is a heap object - get map.
6130 Result temp = allocator()->Allocate();
6131 ASSERT(temp.is_valid());
6132 // Check if the object is a regexp.
6133 __ CmpObjectType(value.reg(), JS_REGEXP_TYPE, temp.reg());
6134 value.Unuse();
6135 temp.Unuse();
6136 destination()->Split(equal);
6137}
6138
6139
Steve Blockd0582a62009-12-15 09:54:21 +00006140void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
6141 // This generates a fast version of:
6142 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
6143 ASSERT(args->length() == 1);
6144 Load(args->at(0));
6145 Result obj = frame_->Pop();
6146 obj.ToRegister();
6147
6148 __ test(obj.reg(), Immediate(kSmiTagMask));
6149 destination()->false_target()->Branch(zero);
6150 __ cmp(obj.reg(), Factory::null_value());
6151 destination()->true_target()->Branch(equal);
6152
6153 Result map = allocator()->Allocate();
6154 ASSERT(map.is_valid());
6155 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
6156 // Undetectable objects behave like undefined when tested with typeof.
6157 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset));
6158 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable));
6159 destination()->false_target()->Branch(not_zero);
6160 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
6161 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
6162 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
Leon Clarkef7060e22010-06-03 12:02:55 +01006163 destination()->false_target()->Branch(below);
Steve Blockd0582a62009-12-15 09:54:21 +00006164 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
6165 obj.Unuse();
6166 map.Unuse();
Leon Clarkef7060e22010-06-03 12:02:55 +01006167 destination()->Split(below_equal);
Steve Blockd0582a62009-12-15 09:54:21 +00006168}
6169
6170
6171void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
6172 // This generates a fast version of:
6173 // (%_ClassOf(arg) === 'Function')
6174 ASSERT(args->length() == 1);
6175 Load(args->at(0));
6176 Result obj = frame_->Pop();
6177 obj.ToRegister();
6178 __ test(obj.reg(), Immediate(kSmiTagMask));
6179 destination()->false_target()->Branch(zero);
6180 Result temp = allocator()->Allocate();
6181 ASSERT(temp.is_valid());
6182 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
6183 obj.Unuse();
6184 temp.Unuse();
6185 destination()->Split(equal);
6186}
6187
6188
Leon Clarked91b9f72010-01-27 17:25:45 +00006189void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
6190 ASSERT(args->length() == 1);
6191 Load(args->at(0));
6192 Result obj = frame_->Pop();
6193 obj.ToRegister();
6194 __ test(obj.reg(), Immediate(kSmiTagMask));
6195 destination()->false_target()->Branch(zero);
6196 Result temp = allocator()->Allocate();
6197 ASSERT(temp.is_valid());
6198 __ mov(temp.reg(),
6199 FieldOperand(obj.reg(), HeapObject::kMapOffset));
6200 __ movzx_b(temp.reg(),
6201 FieldOperand(temp.reg(), Map::kBitFieldOffset));
6202 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
6203 obj.Unuse();
6204 temp.Unuse();
6205 destination()->Split(not_zero);
6206}
6207
6208
Steve Blocka7e24c12009-10-30 11:49:00 +00006209void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
6210 ASSERT(args->length() == 0);
6211
6212 // Get the frame pointer for the calling frame.
6213 Result fp = allocator()->Allocate();
6214 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
6215
6216 // Skip the arguments adaptor frame if it exists.
6217 Label check_frame_marker;
6218 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
6219 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
6220 __ j(not_equal, &check_frame_marker);
6221 __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
6222
6223 // Check the marker in the calling frame.
6224 __ bind(&check_frame_marker);
6225 __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
6226 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
6227 fp.Unuse();
6228 destination()->Split(equal);
6229}
6230
6231
6232void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
6233 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01006234
6235 Result fp = allocator_->Allocate();
6236 Result result = allocator_->Allocate();
6237 ASSERT(fp.is_valid() && result.is_valid());
6238
6239 Label exit;
6240
6241 // Get the number of formal parameters.
6242 __ Set(result.reg(), Immediate(Smi::FromInt(scope()->num_parameters())));
6243
6244 // Check if the calling frame is an arguments adaptor frame.
6245 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
6246 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
6247 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
6248 __ j(not_equal, &exit);
6249
6250 // Arguments adaptor case: Read the arguments length from the
6251 // adaptor frame.
6252 __ mov(result.reg(),
6253 Operand(fp.reg(), ArgumentsAdaptorFrameConstants::kLengthOffset));
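  // (An adaptor frame is interposed when a function is called with a
  // different number of arguments than it formally declares; its length
  // slot records the actual count. Sketch of the mechanism.)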
6254
6255 __ bind(&exit);
6256 result.set_type_info(TypeInfo::Smi());
6257 if (FLAG_debug_code) __ AbortIfNotSmi(result.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00006258 frame_->Push(&result);
6259}
6260
6261
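// Pushes the class name of the argument as used by %_ClassOf
// (summary of the code below): null for smis and non-JS objects,
// 'Function' for functions, 'Object' for objects whose map's
// constructor is not a function, and otherwise the constructor's
// instance class name.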
6262void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
6263 ASSERT(args->length() == 1);
6264 JumpTarget leave, null, function, non_function_constructor;
6265 Load(args->at(0)); // Load the object.
6266 Result obj = frame_->Pop();
6267 obj.ToRegister();
6268 frame_->Spill(obj.reg());
6269
6270 // If the object is a smi, we return null.
6271 __ test(obj.reg(), Immediate(kSmiTagMask));
6272 null.Branch(zero);
6273
6274 // Check that the object is a JS object but take special care of JS
6275 // functions to make sure they have 'Function' as their class.
6276 { Result tmp = allocator()->Allocate();
6277 __ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
6278 __ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset));
6279 __ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE);
Leon Clarkef7060e22010-06-03 12:02:55 +01006280 null.Branch(below);
Steve Blocka7e24c12009-10-30 11:49:00 +00006281
6282 // As long as JS_FUNCTION_TYPE is the last instance type and it is
6283 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
6284 // LAST_JS_OBJECT_TYPE.
6285 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
6286 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
6287 __ cmp(tmp.reg(), JS_FUNCTION_TYPE);
6288 function.Branch(equal);
6289 }
6290
6291 // Check if the constructor in the map is a function.
6292 { Result tmp = allocator()->Allocate();
6293 __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
6294 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
6295 non_function_constructor.Branch(not_equal);
6296 }
6297
6298 // The map register now contains the constructor function. Grab the
6299 // instance class name from there.
6300 __ mov(obj.reg(),
6301 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
6302 __ mov(obj.reg(),
6303 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
6304 frame_->Push(&obj);
6305 leave.Jump();
6306
6307 // Functions have class 'Function'.
6308 function.Bind();
6309 frame_->Push(Factory::function_class_symbol());
6310 leave.Jump();
6311
6312 // Objects with a non-function constructor have class 'Object'.
6313 non_function_constructor.Bind();
6314 frame_->Push(Factory::Object_symbol());
6315 leave.Jump();
6316
6317 // Non-JS objects have class null.
6318 null.Bind();
6319 frame_->Push(Factory::null_value());
6320
6321 // All done.
6322 leave.Bind();
6323}
6324
6325
6326void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
6327 ASSERT(args->length() == 1);
6328 JumpTarget leave;
6329 Load(args->at(0)); // Load the object.
6330 frame_->Dup();
6331 Result object = frame_->Pop();
6332 object.ToRegister();
6333 ASSERT(object.is_valid());
6334 // if (object->IsSmi()) return object.
6335 __ test(object.reg(), Immediate(kSmiTagMask));
6336 leave.Branch(zero, taken);
6337 // It is a heap object - get map.
6338 Result temp = allocator()->Allocate();
6339 ASSERT(temp.is_valid());
6340 // if (!object->IsJSValue()) return object.
6341 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
6342 leave.Branch(not_equal, not_taken);
6343 __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
6344 object.Unuse();
6345 frame_->SetElementAt(0, &temp);
6346 leave.Bind();
6347}
6348
6349
6350void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
6351 ASSERT(args->length() == 2);
6352 JumpTarget leave;
6353 Load(args->at(0)); // Load the object.
6354 Load(args->at(1)); // Load the value.
6355 Result value = frame_->Pop();
6356 Result object = frame_->Pop();
6357 value.ToRegister();
6358 object.ToRegister();
6359
6360 // if (object->IsSmi()) return value.
6361 __ test(object.reg(), Immediate(kSmiTagMask));
6362 leave.Branch(zero, &value, taken);
6363
6364 // It is a heap object - get its map.
6365 Result scratch = allocator_->Allocate();
6366 ASSERT(scratch.is_valid());
6367 // if (!object->IsJSValue()) return value.
6368 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
6369 leave.Branch(not_equal, &value, not_taken);
6370
6371 // Store the value.
6372 __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
6373 // Update the write barrier. Save the value as it will be
6374 // overwritten by the write barrier code and is needed afterward.
6375 Result duplicate_value = allocator_->Allocate();
6376 ASSERT(duplicate_value.is_valid());
6377 __ mov(duplicate_value.reg(), value.reg());
6378 // The object register is also overwritten by the write barrier and
6379 // possibly aliased in the frame.
6380 frame_->Spill(object.reg());
6381 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
6382 scratch.reg());
6383 object.Unuse();
6384 scratch.Unuse();
6385 duplicate_value.Unuse();
6386
6387 // Leave.
6388 leave.Bind(&value);
6389 frame_->Push(&value);
6390}
6391
6392
Steve Block6ded16b2010-05-10 14:33:55 +01006393void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006394 ASSERT(args->length() == 1);
6395
6396 // ArgumentsAccessStub expects the key in edx and the formal
6397 // parameter count in eax.
6398 Load(args->at(0));
6399 Result key = frame_->Pop();
6400 // Explicitly create a constant result.
Andrei Popescu31002712010-02-23 13:46:05 +00006401 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00006402 // Call the shared stub to get to arguments[key].
6403 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
6404 Result result = frame_->CallStub(&stub, &key, &count);
6405 frame_->Push(&result);
6406}
6407
6408
6409void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
6410 ASSERT(args->length() == 2);
6411
6412 // Load the two objects into registers and perform the comparison.
6413 Load(args->at(0));
6414 Load(args->at(1));
6415 Result right = frame_->Pop();
6416 Result left = frame_->Pop();
6417 right.ToRegister();
6418 left.ToRegister();
6419 __ cmp(right.reg(), Operand(left.reg()));
6420 right.Unuse();
6421 left.Unuse();
6422 destination()->Split(equal);
6423}
6424
6425
6426void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
6427 ASSERT(args->length() == 0);
6428 ASSERT(kSmiTag == 0); // ebp is aligned, so its value looks like a smi.
6429 Result ebp_as_smi = allocator_->Allocate();
6430 ASSERT(ebp_as_smi.is_valid());
6431 __ mov(ebp_as_smi.reg(), Operand(ebp));
6432 frame_->Push(&ebp_as_smi);
6433}
6434
6435
Steve Block6ded16b2010-05-10 14:33:55 +01006436void CodeGenerator::GenerateRandomHeapNumber(
6437 ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006438 ASSERT(args->length() == 0);
6439 frame_->SpillAll();
6440
Steve Block6ded16b2010-05-10 14:33:55 +01006441 Label slow_allocate_heapnumber;
6442 Label heapnumber_allocated;
Steve Blocka7e24c12009-10-30 11:49:00 +00006443
Steve Block6ded16b2010-05-10 14:33:55 +01006444 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
6445 __ jmp(&heapnumber_allocated);
Steve Blocka7e24c12009-10-30 11:49:00 +00006446
Steve Block6ded16b2010-05-10 14:33:55 +01006447 __ bind(&slow_allocate_heapnumber);
6448 // To allocate a heap number and ensure that it is not a smi, we
6449 // call the runtime function NumberUnaryMinus on 0, returning the double
6450 // -0.0. A new, distinct heap number is returned each time.
6451 __ push(Immediate(Smi::FromInt(0)));
6452 __ CallRuntime(Runtime::kNumberUnaryMinus, 1);
6453 __ mov(edi, eax);
6454
6455 __ bind(&heapnumber_allocated);
6456
6457 __ PrepareCallCFunction(0, ebx);
6458 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
6459
6460 // Convert 32 random bits in eax to 0.(32 random bits) in a double
6461 // by computing:
6462 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
6463 // This is implemented on both SSE2 and FPU.
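  // Worked out: 1.0 x 2^20 has bit pattern 0x4130000000000000; placing
  // the 32 random bits r in the low mantissa bits gives
  // (1 + r * 2^-52) * 2^20 = 2^20 + r * 2^-32, so the subtraction
  // leaves r / 2^32, a value in [0, 1).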
6464 if (CpuFeatures::IsSupported(SSE2)) {
6465 CpuFeatures::Scope fscope(SSE2);
6466 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
6467 __ movd(xmm1, Operand(ebx));
6468 __ movd(xmm0, Operand(eax));
6469 __ cvtss2sd(xmm1, xmm1);
6470 __ pxor(xmm0, xmm1);
6471 __ subsd(xmm0, xmm1);
6472 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
6473 } else {
6474 // 0x4130000000000000 is 1.0 x 2^20 as a double.
6475 __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
6476 Immediate(0x41300000));
6477 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
6478 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
6479 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
6480 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
6481 __ fsubp(1);
6482 __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00006483 }
Steve Block6ded16b2010-05-10 14:33:55 +01006484 __ mov(eax, edi);
Steve Blocka7e24c12009-10-30 11:49:00 +00006485
6486 Result result = allocator_->Allocate(eax);
6487 frame_->Push(&result);
6488}
6489
6490
Steve Blockd0582a62009-12-15 09:54:21 +00006491void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
6492 ASSERT_EQ(2, args->length());
6493
6494 Load(args->at(0));
6495 Load(args->at(1));
6496
6497 StringAddStub stub(NO_STRING_ADD_FLAGS);
6498 Result answer = frame_->CallStub(&stub, 2);
6499 frame_->Push(&answer);
6500}
6501
6502
Leon Clarkee46be812010-01-19 14:06:41 +00006503void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
6504 ASSERT_EQ(3, args->length());
6505
6506 Load(args->at(0));
6507 Load(args->at(1));
6508 Load(args->at(2));
6509
6510 SubStringStub stub;
6511 Result answer = frame_->CallStub(&stub, 3);
6512 frame_->Push(&answer);
6513}
6514
6515
6516void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
6517 ASSERT_EQ(2, args->length());
6518
6519 Load(args->at(0));
6520 Load(args->at(1));
6521
6522 StringCompareStub stub;
6523 Result answer = frame_->CallStub(&stub, 2);
6524 frame_->Push(&answer);
6525}
6526
6527
6528void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01006529 ASSERT_EQ(4, args->length());
Leon Clarkee46be812010-01-19 14:06:41 +00006530
6531 // Load the arguments on the stack and call the stub.
6532 Load(args->at(0));
6533 Load(args->at(1));
6534 Load(args->at(2));
6535 Load(args->at(3));
6536 RegExpExecStub stub;
6537 Result result = frame_->CallStub(&stub, 4);
6538 frame_->Push(&result);
6539}
6540
6541
Steve Block6ded16b2010-05-10 14:33:55 +01006542void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
6543 // No stub. This code only occurs a few times in regexp.js.
6544 const int kMaxInlineLength = 100;
6545 ASSERT_EQ(3, args->length());
6546 Load(args->at(0)); // Size of array, smi.
6547 Load(args->at(1)); // "index" property value.
6548 Load(args->at(2)); // "input" property value.
6549 {
6550 VirtualFrame::SpilledScope spilled_scope;
6551
6552 Label slowcase;
6553 Label done;
6554 __ mov(ebx, Operand(esp, kPointerSize * 2));
6555 __ test(ebx, Immediate(kSmiTagMask));
6556 __ j(not_zero, &slowcase);
6557 __ cmp(Operand(ebx), Immediate(Smi::FromInt(kMaxInlineLength)));
6558 __ j(above, &slowcase);
6559 // Smi-tagging is equivalent to multiplying by 2.
6560 STATIC_ASSERT(kSmiTag == 0);
6561 STATIC_ASSERT(kSmiTagSize == 1);
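    // Hence ebx, holding the smi length n, already contains 2 * n;
    // scaling by times_half_pointer_size (a factor of 2) yields
    // n * kPointerSize bytes, i.e. room for n element pointers.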
6562 // Allocate RegExpResult followed by FixedArray with size in ebx.
6563 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
6564 // Elements: [Map][Length][..elements..]
6565 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize,
6566 times_half_pointer_size,
6567 ebx, // In: Number of elements (times 2, being a smi)
6568 eax, // Out: Start of allocation (tagged).
6569 ecx, // Out: End of allocation.
6570 edx, // Scratch register
6571 &slowcase,
6572 TAG_OBJECT);
6573 // eax: Start of allocated area, object-tagged.
6574
6575 // Set JSArray map to global.regexp_result_map().
6576 // Set empty properties FixedArray.
6577 // Set elements to point to FixedArray allocated right after the JSArray.
6578 // Interleave operations for better latency.
6579 __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
6580 __ mov(ecx, Immediate(Factory::empty_fixed_array()));
6581 __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
6582 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
6583 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
6584 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
6585 __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
6586 __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
6587
6588 // Set input, index and length fields from arguments.
6589 __ pop(FieldOperand(eax, JSRegExpResult::kInputOffset));
6590 __ pop(FieldOperand(eax, JSRegExpResult::kIndexOffset));
6591 __ pop(ecx);
6592 __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
6593
6594 // Fill out the elements FixedArray.
6595 // eax: JSArray.
6596 // ebx: FixedArray.
6597 // ecx: Number of elements in array, as smi.
6598
6599 // Set map.
6600 __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
6601 Immediate(Factory::fixed_array_map()));
6602 // Set length.
6603 __ SmiUntag(ecx);
6604 __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
6605 // Fill contents of fixed-array with the-hole.
6606 __ mov(edx, Immediate(Factory::the_hole_value()));
6607 __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
6608 // Fill fixed array elements with hole.
6609 // eax: JSArray.
6610 // ecx: Number of elements to fill.
6611 // ebx: Start of elements in FixedArray.
6612 // edx: the hole.
6613 Label loop;
6614 __ test(ecx, Operand(ecx));
6615 __ bind(&loop);
6616 __ j(less_equal, &done); // Jump if ecx is negative or zero.
6617 __ sub(Operand(ecx), Immediate(1));
6618 __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
6619 __ jmp(&loop);
6620
6621 __ bind(&slowcase);
6622 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
6623
6624 __ bind(&done);
6625 }
6626 frame_->Forget(3);
6627 frame_->Push(eax);
6628}
6629
6630
6631class DeferredSearchCache: public DeferredCode {
6632 public:
6633 DeferredSearchCache(Register dst, Register cache, Register key)
6634 : dst_(dst), cache_(cache), key_(key) {
6635 set_comment("[ DeferredSearchCache");
6636 }
6637
6638 virtual void Generate();
6639
6640 private:
Kristian Monsen25f61362010-05-21 11:50:48 +01006641 Register dst_; // On invocation, the smi-tagged index of the finger;
6642 // on exit, holds the value that was looked up.
6643 Register cache_; // Instance of JSFunctionResultCache.
6644 Register key_; // The key being looked up.
Steve Block6ded16b2010-05-10 14:33:55 +01006645};
6646
6647
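// The cache is a fixed array laid out roughly as
//   [factory][finger][cache size][key 0][value 0][key 1][value 1]...
// with two-word (key, value) entries and a finger pointing at the most
// recently used entry; this sketch of the layout is inferred from the
// offsets used below.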
6648void DeferredSearchCache::Generate() {
Kristian Monsen25f61362010-05-21 11:50:48 +01006649 Label first_loop, search_further, second_loop, cache_miss;
6650
6651 // Smi-tagging is equivalent to multiplying by 2.
6652 STATIC_ASSERT(kSmiTag == 0);
6653 STATIC_ASSERT(kSmiTagSize == 1);
6654
6655 Smi* kEntrySizeSmi = Smi::FromInt(JSFunctionResultCache::kEntrySize);
6656 Smi* kEntriesIndexSmi = Smi::FromInt(JSFunctionResultCache::kEntriesIndex);
6657
6658 // Check the cache from finger to start of the cache.
6659 __ bind(&first_loop);
6660 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
6661 __ cmp(Operand(dst_), Immediate(kEntriesIndexSmi));
6662 __ j(less, &search_further);
6663
6664 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
6665 __ j(not_equal, &first_loop);
6666
6667 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
6668 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
6669 __ jmp(exit_label());
6670
6671 __ bind(&search_further);
6672
6673 // Check the cache from end of cache up to finger.
6674 __ mov(dst_, FieldOperand(cache_, JSFunctionResultCache::kCacheSizeOffset));
6675
6676 __ bind(&second_loop);
6677 __ sub(Operand(dst_), Immediate(kEntrySizeSmi));
6678 // Consider prefetching into some reg.
6679 __ cmp(dst_, FieldOperand(cache_, JSFunctionResultCache::kFingerOffset));
6680 __ j(less_equal, &cache_miss);
6681
6682 __ cmp(key_, CodeGenerator::FixedArrayElementOperand(cache_, dst_));
6683 __ j(not_equal, &second_loop);
6684
6685 __ mov(FieldOperand(cache_, JSFunctionResultCache::kFingerOffset), dst_);
6686 __ mov(dst_, CodeGenerator::FixedArrayElementOperand(cache_, dst_, 1));
6687 __ jmp(exit_label());
6688
6689 __ bind(&cache_miss);
6690 __ push(cache_); // store a reference to cache
6691 __ push(key_); // store a key
6692 Handle<Object> receiver(Top::global_context()->global());
6693 __ push(Immediate(receiver));
Steve Block6ded16b2010-05-10 14:33:55 +01006694 __ push(key_);
Kristian Monsen25f61362010-05-21 11:50:48 +01006695 // On ia32 the function to invoke must be in edi.
6696 __ mov(edi, FieldOperand(cache_, JSFunctionResultCache::kFactoryOffset));
6697 ParameterCount expected(1);
6698 __ InvokeFunction(edi, expected, CALL_FUNCTION);
6699
6700 // Find a place to put the new cached value.
6701 Label add_new_entry, update_cache;
6702 __ mov(ecx, Operand(esp, kPointerSize)); // restore the cache
6703 // Possible optimization: the cache size is constant for a given cache,
6704 // so technically we could use a constant here. However, if we have a
6705 // cache miss, this optimization would hardly matter.
6706
6707 // Check if we could add new entry to cache.
6708 __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
6709 __ SmiTag(ebx);
6710 __ cmp(ebx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
6711 __ j(greater, &add_new_entry);
6712
6713 // Check if we could evict entry after finger.
6714 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
6715 __ add(Operand(edx), Immediate(kEntrySizeSmi));
6716 __ cmp(ebx, Operand(edx));
6717 __ j(greater, &update_cache);
6718
6719 // Need to wrap over the cache.
6720 __ mov(edx, Immediate(kEntriesIndexSmi));
6721 __ jmp(&update_cache);
6722
6723 __ bind(&add_new_entry);
6724 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset));
6725 __ lea(ebx, Operand(edx, JSFunctionResultCache::kEntrySize << 1));
6726 __ mov(FieldOperand(ecx, JSFunctionResultCache::kCacheSizeOffset), ebx);
6727
6728 // Update the cache itself.
6729 // edx holds the index.
6730 __ bind(&update_cache);
6731 __ pop(ebx); // restore the key
6732 __ mov(FieldOperand(ecx, JSFunctionResultCache::kFingerOffset), edx);
6733 // Store key.
6734 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
6735 __ RecordWrite(ecx, 0, ebx, edx);
6736
6737 // Store value.
6738 __ pop(ecx); // restore the cache.
6739 __ mov(edx, FieldOperand(ecx, JSFunctionResultCache::kFingerOffset));
6740 __ add(Operand(edx), Immediate(Smi::FromInt(1)));
6741 __ mov(ebx, eax);
6742 __ mov(CodeGenerator::FixedArrayElementOperand(ecx, edx), ebx);
6743 __ RecordWrite(ecx, 0, ebx, edx);
6744
Steve Block6ded16b2010-05-10 14:33:55 +01006745 if (!dst_.is(eax)) {
6746 __ mov(dst_, eax);
6747 }
6748}
6749
6750
6751void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
6752 ASSERT_EQ(2, args->length());
6753
6754 ASSERT_NE(NULL, args->at(0)->AsLiteral());
6755 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
6756
6757 Handle<FixedArray> jsfunction_result_caches(
6758 Top::global_context()->jsfunction_result_caches());
6759 if (jsfunction_result_caches->length() <= cache_id) {
6760 __ Abort("Attempt to use undefined cache.");
6761 frame_->Push(Factory::undefined_value());
6762 return;
6763 }
6764
6765 Load(args->at(1));
6766 Result key = frame_->Pop();
6767 key.ToRegister();
6768
6769 Result cache = allocator()->Allocate();
6770 ASSERT(cache.is_valid());
6771 __ mov(cache.reg(), ContextOperand(esi, Context::GLOBAL_INDEX));
6772 __ mov(cache.reg(),
6773 FieldOperand(cache.reg(), GlobalObject::kGlobalContextOffset));
6774 __ mov(cache.reg(),
6775 ContextOperand(cache.reg(), Context::JSFUNCTION_RESULT_CACHES_INDEX));
6776 __ mov(cache.reg(),
6777 FieldOperand(cache.reg(), FixedArray::OffsetOfElementAt(cache_id)));
6778
6779 Result tmp = allocator()->Allocate();
6780 ASSERT(tmp.is_valid());
6781
6782 DeferredSearchCache* deferred = new DeferredSearchCache(tmp.reg(),
6783 cache.reg(),
6784 key.reg());
6785
Steve Block6ded16b2010-05-10 14:33:55 +01006786 // tmp.reg() now holds finger offset as a smi.
6787 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Kristian Monsen25f61362010-05-21 11:50:48 +01006788 __ mov(tmp.reg(), FieldOperand(cache.reg(),
6789 JSFunctionResultCache::kFingerOffset));
6790 __ cmp(key.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01006791 deferred->Branch(not_equal);
6792
Kristian Monsen25f61362010-05-21 11:50:48 +01006793 __ mov(tmp.reg(), FixedArrayElementOperand(cache.reg(), tmp.reg(), 1));
Steve Block6ded16b2010-05-10 14:33:55 +01006794
6795 deferred->BindExit();
6796 frame_->Push(&tmp);
6797}
6798
6799
Andrei Popescu402d9372010-02-26 13:31:12 +00006800void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
6801 ASSERT_EQ(args->length(), 1);
6802
6803 // Load the argument on the stack and call the stub.
6804 Load(args->at(0));
6805 NumberToStringStub stub;
6806 Result result = frame_->CallStub(&stub, 1);
6807 frame_->Push(&result);
6808}
6809
6810
Steve Block6ded16b2010-05-10 14:33:55 +01006811class DeferredSwapElements: public DeferredCode {
6812 public:
6813 DeferredSwapElements(Register object, Register index1, Register index2)
6814 : object_(object), index1_(index1), index2_(index2) {
6815 set_comment("[ DeferredSwapElements");
6816 }
6817
6818 virtual void Generate();
6819
6820 private:
6821 Register object_, index1_, index2_;
6822};
6823
6824
6825void DeferredSwapElements::Generate() {
6826 __ push(object_);
6827 __ push(index1_);
6828 __ push(index2_);
6829 __ CallRuntime(Runtime::kSwapElements, 3);
6830}
6831
6832
6833void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
6834 // Note: this code assumes that the indices passed are within the
6835 // elements' bounds and refer to valid (non-hole) values.
6836 Comment cmnt(masm_, "[ GenerateSwapElements");
6837
6838 ASSERT_EQ(3, args->length());
6839
6840 Load(args->at(0));
6841 Load(args->at(1));
6842 Load(args->at(2));
6843
6844 Result index2 = frame_->Pop();
6845 index2.ToRegister();
6846
6847 Result index1 = frame_->Pop();
6848 index1.ToRegister();
6849
6850 Result object = frame_->Pop();
6851 object.ToRegister();
6852
6853 Result tmp1 = allocator()->Allocate();
6854 tmp1.ToRegister();
6855 Result tmp2 = allocator()->Allocate();
6856 tmp2.ToRegister();
6857
6858 frame_->Spill(object.reg());
6859 frame_->Spill(index1.reg());
6860 frame_->Spill(index2.reg());
6861
6862 DeferredSwapElements* deferred = new DeferredSwapElements(object.reg(),
6863 index1.reg(),
6864 index2.reg());
6865
6866 // Fetch the map and check if array is in fast case.
6867 // Check that object doesn't require security checks and
6868 // has no indexed interceptor.
6869 __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
Leon Clarkef7060e22010-06-03 12:02:55 +01006870 deferred->Branch(below);
Steve Block6ded16b2010-05-10 14:33:55 +01006871 __ movzx_b(tmp1.reg(), FieldOperand(tmp1.reg(), Map::kBitFieldOffset));
6872 __ test(tmp1.reg(), Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
6873 deferred->Branch(not_zero);
6874
6875 // Check the object's elements are in fast case.
6876 __ mov(tmp1.reg(), FieldOperand(object.reg(), JSObject::kElementsOffset));
6877 __ cmp(FieldOperand(tmp1.reg(), HeapObject::kMapOffset),
6878 Immediate(Factory::fixed_array_map()));
6879 deferred->Branch(not_equal);
6880
6881 // Smi-tagging is equivalent to multiplying by 2.
6882 STATIC_ASSERT(kSmiTag == 0);
6883 STATIC_ASSERT(kSmiTagSize == 1);
6884
6885 // Check that both indices are smis.
6886 __ mov(tmp2.reg(), index1.reg());
6887 __ or_(tmp2.reg(), Operand(index2.reg()));
6888 __ test(tmp2.reg(), Immediate(kSmiTagMask));
6889 deferred->Branch(not_zero);
6890
6891 // Bring addresses into index1 and index2.
Kristian Monsen25f61362010-05-21 11:50:48 +01006892 __ lea(index1.reg(), FixedArrayElementOperand(tmp1.reg(), index1.reg()));
6893 __ lea(index2.reg(), FixedArrayElementOperand(tmp1.reg(), index2.reg()));
Steve Block6ded16b2010-05-10 14:33:55 +01006894
6895 // Swap elements.
6896 __ mov(object.reg(), Operand(index1.reg(), 0));
6897 __ mov(tmp2.reg(), Operand(index2.reg(), 0));
6898 __ mov(Operand(index2.reg(), 0), object.reg());
6899 __ mov(Operand(index1.reg(), 0), tmp2.reg());
6900
6901 Label done;
6902 __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
6903 // Possible optimization: do a check that both values are Smis
6904 // (or them and test against Smi mask.)
6905
6906 __ mov(tmp2.reg(), tmp1.reg());
6907 RecordWriteStub recordWrite1(tmp2.reg(), index1.reg(), object.reg());
6908 __ CallStub(&recordWrite1);
6909
6910 RecordWriteStub recordWrite2(tmp1.reg(), index2.reg(), object.reg());
6911 __ CallStub(&recordWrite2);
6912
6913 __ bind(&done);
6914
6915 deferred->BindExit();
6916 frame_->Push(Factory::undefined_value());
6917}
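
// Both stores in the swap above may create pointers into new space, so
// unless the elements array itself is in new space (the InNewSpace
// early-out) a write barrier has to run for each slot. The two
// RecordWriteStub calls reuse the element addresses already computed
// into index1 and index2 by the lea instructions instead of recomputing
// object-plus-offset forms.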


void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateCallFunction");

  ASSERT(args->length() >= 2);

  int n_args = args->length() - 2;  // for receiver and function.
  Load(args->at(0));  // receiver
  for (int i = 0; i < n_args; i++) {
    Load(args->at(i + 1));
  }
  Load(args->at(n_args + 1));  // function
  Result result = frame_->CallJSFunction(n_args);
  frame_->Push(&result);
}
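
// For reference, with args = (receiver, a0, a1, fn) the loads above
// leave the frame as receiver, a0, a1, fn from bottom to top - that is,
// n_args = 2 arguments under the function on top, which is the shape
// VirtualFrame::CallJSFunction(n_args) consumes.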


// Generates the Math.pow method. Only handles special cases and
// branches to the runtime system for everything else. Please note
// that this function assumes that the callsite has executed ToNumber
// on both arguments.
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));
  if (!CpuFeatures::IsSupported(SSE2)) {
    Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->Push(&res);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    Label allocate_return;
    // Load the two operands while leaving the values on the frame.
    frame()->Dup();
    Result exponent = frame()->Pop();
    exponent.ToRegister();
    frame()->Spill(exponent.reg());
    frame()->PushElementAt(1);
    Result base = frame()->Pop();
    base.ToRegister();
    frame()->Spill(base.reg());

    Result answer = allocator()->Allocate();
    ASSERT(answer.is_valid());
    ASSERT(!exponent.reg().is(base.reg()));
    JumpTarget call_runtime;

    // Save 1 in xmm3 - we need this several times later on.
    __ mov(answer.reg(), Immediate(1));
    __ cvtsi2sd(xmm3, Operand(answer.reg()));

    Label exponent_nonsmi;
    Label base_nonsmi;
    // If the exponent is a heap number go to that specific case.
    __ test(exponent.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &exponent_nonsmi);
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_nonsmi);

    // Optimized version when the exponent is an integer.
    Label powi;
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&powi);
    // The exponent is a smi and the base is a heap number.
    __ bind(&base_nonsmi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);

    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // Optimized version of pow if the exponent is an integer.
    __ bind(&powi);
    __ SmiUntag(exponent.reg());

    // Save the exponent in base as we need to check later whether it is
    // negative. We know that base and exponent are in different registers.
    __ mov(base.reg(), exponent.reg());

    // Get the absolute value of the exponent.
    Label no_neg;
    __ cmp(exponent.reg(), 0);
    __ j(greater_equal, &no_neg);
    __ neg(exponent.reg());
    __ bind(&no_neg);

    // Load xmm1 with 1.
    __ movsd(xmm1, xmm3);
    Label while_true;
    Label no_multiply;

    __ bind(&while_true);
    __ shr(exponent.reg(), 1);
    __ j(not_carry, &no_multiply);
    __ mulsd(xmm1, xmm0);
    __ bind(&no_multiply);
    __ test(exponent.reg(), Operand(exponent.reg()));
    __ mulsd(xmm0, xmm0);
    __ j(not_zero, &while_true);

    // base holds the original value of the exponent - if the exponent is
    // negative, return 1/result.
    __ test(base.reg(), Operand(base.reg()));
    __ j(positive, &allocate_return);
    // Special case if xmm1 has reached infinity.
    __ mov(answer.reg(), Immediate(0x7FB00000));
    __ movd(xmm0, Operand(answer.reg()));
    __ cvtss2sd(xmm0, xmm0);
    __ ucomisd(xmm0, xmm1);
    call_runtime.Branch(equal);
    __ divsd(xmm3, xmm1);
    __ movsd(xmm1, xmm3);
    __ jmp(&allocate_return);

    // The exponent (or both operands) is a heap number - no matter what, we
    // should now work on doubles.
    __ bind(&exponent_nonsmi);
    __ cmp(FieldOperand(exponent.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);
    __ movdbl(xmm1, FieldOperand(exponent.reg(), HeapNumber::kValueOffset));
    // Test if the exponent is NaN.
    __ ucomisd(xmm1, xmm1);
    call_runtime.Branch(parity_even);

    Label base_not_smi;
    Label handle_special_cases;
    __ test(base.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &base_not_smi);
    __ SmiUntag(base.reg());
    __ cvtsi2sd(xmm0, Operand(base.reg()));
    __ jmp(&handle_special_cases);
    __ bind(&base_not_smi);
    __ cmp(FieldOperand(base.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    call_runtime.Branch(not_equal);
    __ mov(answer.reg(), FieldOperand(base.reg(), HeapNumber::kExponentOffset));
    __ and_(answer.reg(), HeapNumber::kExponentMask);
    __ cmp(Operand(answer.reg()), Immediate(HeapNumber::kExponentMask));
    // The base is NaN or +/-Infinity.
    call_runtime.Branch(greater_equal);
    __ movdbl(xmm0, FieldOperand(base.reg(), HeapNumber::kValueOffset));

    // The base is in xmm0 and the exponent is in xmm1.
    __ bind(&handle_special_cases);
    Label not_minus_half;
    // Test for -0.5.
    // Load xmm2 with -0.5.
    __ mov(answer.reg(), Immediate(0xBF000000));
    __ movd(xmm2, Operand(answer.reg()));
    __ cvtss2sd(xmm2, xmm2);
    // xmm2 now has -0.5.
    __ ucomisd(xmm2, xmm1);
    __ j(not_equal, &not_minus_half);

    // Calculates the reciprocal of the square root.
    // Note that 1/sqrt(x) = sqrt(1/x).
    __ divsd(xmm3, xmm0);
    __ movsd(xmm1, xmm3);
    __ sqrtsd(xmm1, xmm1);
    __ jmp(&allocate_return);

    // Test for 0.5.
    __ bind(&not_minus_half);
    // Load xmm2 with 0.5.
    // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
    __ addsd(xmm2, xmm3);
    // xmm2 now has 0.5.
    __ comisd(xmm2, xmm1);
    call_runtime.Branch(not_equal);
    // Calculates the square root.
    __ movsd(xmm1, xmm0);
    __ sqrtsd(xmm1, xmm1);

    JumpTarget done;
    Label failure, success;
    __ bind(&allocate_return);
    // Make a copy of the frame to enable us to handle allocation
    // failure after the JumpTarget jump.
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(answer.reg(), exponent.reg(),
                          base.reg(), &failure);
    __ movdbl(FieldOperand(answer.reg(), HeapNumber::kValueOffset), xmm1);
    // Remove the two original values from the frame - we only need those
    // in the case where we branch to runtime.
    frame()->Drop(2);
    exponent.Unuse();
    base.Unuse();
    done.Jump(&answer);
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    // If we experience an allocation failure we branch to runtime.
    __ bind(&failure);
    call_runtime.Bind();
    answer = frame()->CallRuntime(Runtime::kMath_pow_cfunction, 2);

    done.Bind(&answer);
    frame()->Push(&answer);
  }
}
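
// The while_true loop in GenerateMathPow above is binary exponentiation
// (square-and-multiply). As a C sketch of the same computation, once
// the exponent has been made non-negative:
//
//   double acc = 1.0;         // xmm1, seeded from xmm3
//   double b = base;          // xmm0
//   while (exp != 0) {
//     if (exp & 1) acc *= b;  // the shr's carry flag selects this
//     b *= b;
//     exp >>= 1;              // performed by the same shr instruction
//   }
//   // acc is the result; for a negative original exponent the code
//   // then returns 1/acc via the divsd through xmm3 (still 1.0).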


void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::SIN);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  TranscendentalCacheStub stub(TranscendentalCache::COS);
  Result result = frame_->CallStub(&stub, 1);
  frame_->Push(&result);
}


// Generates the Math.sqrt method. Please note - this function assumes that
// the callsite has executed ToNumber on the argument.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));

  if (!CpuFeatures::IsSupported(SSE2)) {
    Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
    frame()->Push(&result);
  } else {
    CpuFeatures::Scope use_sse2(SSE2);
    // Leave original value on the frame if we need to call runtime.
    frame()->Dup();
    Result result = frame()->Pop();
    result.ToRegister();
    frame()->Spill(result.reg());
    Label runtime;
    Label non_smi;
    Label load_done;
    JumpTarget end;

    __ test(result.reg(), Immediate(kSmiTagMask));
    __ j(not_zero, &non_smi);
    __ SmiUntag(result.reg());
    __ cvtsi2sd(xmm0, Operand(result.reg()));
    __ jmp(&load_done);
    __ bind(&non_smi);
    __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
           Factory::heap_number_map());
    __ j(not_equal, &runtime);
    __ movdbl(xmm0, FieldOperand(result.reg(), HeapNumber::kValueOffset));

    __ bind(&load_done);
    __ sqrtsd(xmm0, xmm0);
    // A copy of the virtual frame to allow us to go to runtime after the
    // JumpTarget jump.
    Result scratch = allocator()->Allocate();
    VirtualFrame* clone = new VirtualFrame(frame());
    __ AllocateHeapNumber(result.reg(), scratch.reg(), no_reg, &runtime);

    __ movdbl(FieldOperand(result.reg(), HeapNumber::kValueOffset), xmm0);
    frame()->Drop(1);
    scratch.Unuse();
    end.Jump(&result);
    // We only branch to runtime if we have an allocation error.
    // Use the copy of the original frame as our current frame.
    RegisterFile empty_regs;
    SetFrame(clone, &empty_regs);
    __ bind(&runtime);
    result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);

    end.Bind(&result);
    frame()->Push(&result);
  }
}
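
// GenerateMathSqrt above and GenerateMathPow share one pattern: the
// VirtualFrame is cloned just before AllocateHeapNumber because the
// success path's Jump() consumes the current frame. On allocation
// failure, codegen switches to the saved clone with SetFrame; there the
// duplicated input is still on the frame, so the runtime fallback can
// simply be called with the original argument.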


void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
  ASSERT(!in_safe_int32_mode());
  if (CheckForInlineRuntimeCall(node)) {
    return;
  }

  ZoneList<Expression*>* args = node->arguments();
  Comment cmnt(masm_, "[ CallRuntime");
  Runtime::Function* function = node->function();

  if (function == NULL) {
    // Push the builtins object found in the current global object.
    Result temp = allocator()->Allocate();
    ASSERT(temp.is_valid());
    __ mov(temp.reg(), GlobalObject());
    __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
    frame_->Push(&temp);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  if (function == NULL) {
    // Call the JS runtime function.
    frame_->Push(node->name());
    Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
                                       arg_count,
                                       loop_nesting_);
    frame_->RestoreContextRegister();
    frame_->Push(&answer);
  } else {
    // Call the C runtime function.
    Result answer = frame_->CallRuntime(function, arg_count);
    frame_->Push(&answer);
  }
}
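
// Two dispatch paths above: a runtime call whose Runtime::Function is
// known (function != NULL) goes straight to the C++ runtime through
// CallRuntime. Otherwise the name is resolved as a property of the
// builtins object - which is why that object was pushed first, as the
// receiver - and the call goes through the call IC like an ordinary
// JavaScript call, so the context register must be restored afterwards.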


void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
    // Swap the true and false targets but keep the same actual label
    // as the fall through.
    destination()->Invert();
    LoadCondition(node->expression(), destination(), true);
    // Swap the labels back.
    destination()->Invert();

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    if (property != NULL) {
      Load(property->obj());
      Load(property->key());
      Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
      frame_->Push(&answer);
      return;
    }

    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (variable != NULL) {
      Slot* slot = variable->slot();
      if (variable->is_global()) {
        LoadGlobal();
        frame_->Push(variable->name());
        Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
                                              CALL_FUNCTION, 2);
        frame_->Push(&answer);
        return;

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // Call the runtime to look up the context holding the named
        // variable. Sync the virtual frame eagerly so we can push the
        // arguments directly into place.
        frame_->SyncRange(0, frame_->element_count() - 1);
        frame_->EmitPush(esi);
        frame_->EmitPush(Immediate(variable->name()));
        Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
        ASSERT(context.is_register());
        frame_->EmitPush(context.reg());
        context.Unuse();
        frame_->EmitPush(Immediate(variable->name()));
        Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
                                              CALL_FUNCTION, 2);
        frame_->Push(&answer);
        return;
      }

      // Default: Result of deleting non-global, not dynamically
      // introduced variables is false.
      frame_->Push(Factory::false_value());

    } else {
      // Default: Result of deleting expressions is true.
      Load(node->expression());  // may have side-effects
      frame_->SetElementAt(0, Factory::true_value());
    }

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->Push(&answer);

  } else if (op == Token::VOID) {
    Expression* expression = node->expression();
    if (expression && expression->AsLiteral() && (
        expression->AsLiteral()->IsTrue() ||
        expression->AsLiteral()->IsFalse() ||
        expression->AsLiteral()->handle()->IsNumber() ||
        expression->AsLiteral()->handle()->IsString() ||
        expression->AsLiteral()->handle()->IsJSRegExp() ||
        expression->AsLiteral()->IsNull())) {
      // Omit evaluating the value of the primitive literal.
      // It will be discarded anyway, and can have no side effect.
      frame_->Push(Factory::undefined_value());
    } else {
      Load(node->expression());
      frame_->SetElementAt(0, Factory::undefined_value());
    }

  } else {
    if (in_safe_int32_mode()) {
      Visit(node->expression());
      Result value = frame_->Pop();
      ASSERT(value.is_untagged_int32());
      // Registers containing an int32 value are not multiply used.
      ASSERT(!value.is_register() || !frame_->is_used(value.reg()));
      value.ToRegister();
      switch (op) {
        case Token::SUB: {
          __ neg(value.reg());
          if (node->no_negative_zero()) {
            // -MIN_INT is MIN_INT with the overflow flag set.
            unsafe_bailout_->Branch(overflow);
          } else {
            // MIN_INT and 0 both have bad negations. They both have 31 zeros.
            __ test(value.reg(), Immediate(0x7FFFFFFF));
            unsafe_bailout_->Branch(zero);
          }
          break;
        }
        case Token::BIT_NOT: {
          __ not_(value.reg());
          break;
        }
        case Token::ADD: {
          // Unary plus has no effect on int32 values.
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
      frame_->Push(&value);
    } else {
      Load(node->expression());
      bool overwrite =
          (node->expression()->AsBinaryOperation() != NULL &&
           node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
      switch (op) {
        case Token::NOT:
        case Token::DELETE:
        case Token::TYPEOF:
          UNREACHABLE();  // handled above
          break;

        case Token::SUB: {
          GenericUnaryOpStub stub(Token::SUB, overwrite);
          Result operand = frame_->Pop();
          Result answer = frame_->CallStub(&stub, &operand);
          answer.set_type_info(TypeInfo::Number());
          frame_->Push(&answer);
          break;
        }
        case Token::BIT_NOT: {
          // Smi check.
          JumpTarget smi_label;
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          if (operand_info.IsSmi()) {
            if (FLAG_debug_code) __ AbortIfNotSmi(operand.reg());
            frame_->Spill(operand.reg());
            // Set smi tag bit. It will be reset by the not operation.
            __ lea(operand.reg(), Operand(operand.reg(), kSmiTagMask));
            __ not_(operand.reg());
            Result answer = operand;
            answer.set_type_info(TypeInfo::Smi());
            frame_->Push(&answer);
          } else {
            __ test(operand.reg(), Immediate(kSmiTagMask));
            smi_label.Branch(zero, &operand, taken);

            GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
            Result answer = frame_->CallStub(&stub, &operand);
            continue_label.Jump(&answer);

            smi_label.Bind(&answer);
            answer.ToRegister();
            frame_->Spill(answer.reg());
            // Set smi tag bit. It will be reset by the not operation.
            __ lea(answer.reg(), Operand(answer.reg(), kSmiTagMask));
            __ not_(answer.reg());

            continue_label.Bind(&answer);
            answer.set_type_info(TypeInfo::Integer32());
            frame_->Push(&answer);
          }
          break;
        }
        case Token::ADD: {
          // Smi check.
          JumpTarget continue_label;
          Result operand = frame_->Pop();
          TypeInfo operand_info = operand.type_info();
          operand.ToRegister();
          __ test(operand.reg(), Immediate(kSmiTagMask));
          continue_label.Branch(zero, &operand, taken);

          frame_->Push(&operand);
          Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
                                                CALL_FUNCTION, 1);

          continue_label.Bind(&answer);
          if (operand_info.IsSmi()) {
            answer.set_type_info(TypeInfo::Smi());
          } else if (operand_info.IsInteger32()) {
            answer.set_type_info(TypeInfo::Integer32());
          } else {
            answer.set_type_info(TypeInfo::Number());
          }
          frame_->Push(&answer);
          break;
        }
        default:
          UNREACHABLE();
      }
    }
  }
}
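
// Why the lea/not sequence in the BIT_NOT smi case above works: with
// kSmiTag == 0 and a one-bit tag, a smi is its value shifted left by
// one, so the tag bit is zero. lea sets that bit and the following not
// flips every bit, leaving the tag bit zero again with the payload bits
// complemented. Example: smi 5 is 0b1010; lea gives 0b1011; not gives
// ...11110100, which is exactly the smi encoding of -6 == ~5.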


// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation, call
// into the runtime to convert the argument to a number, and call the
// specialized add or subtract stub. The result is left in dst.
class DeferredPrefixCountOperation: public DeferredCode {
 public:
  DeferredPrefixCountOperation(Register dst,
                               bool is_increment,
                               TypeInfo input_type)
      : dst_(dst), is_increment_(is_increment), input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPrefixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
}


// The value in dst was optimistically incremented or decremented. The
// result overflowed or was not smi tagged. Undo the operation and call
// into the runtime to convert the argument to a number. Update the
// original value in old. Call the specialized add or subtract stub.
// The result is left in dst.
class DeferredPostfixCountOperation: public DeferredCode {
 public:
  DeferredPostfixCountOperation(Register dst,
                                Register old,
                                bool is_increment,
                                TypeInfo input_type)
      : dst_(dst),
        old_(old),
        is_increment_(is_increment),
        input_type_(input_type) {
    set_comment("[ DeferredCountOperation");
  }

  virtual void Generate();

 private:
  Register dst_;
  Register old_;
  bool is_increment_;
  TypeInfo input_type_;
};


void DeferredPostfixCountOperation::Generate() {
  // Undo the optimistic smi operation.
  if (is_increment_) {
    __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
  } else {
    __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
  }
  Register left;
  if (input_type_.IsNumber()) {
    __ push(dst_);  // Save the input to use as the old value.
    left = dst_;
  } else {
    __ push(dst_);
    __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
    __ push(eax);  // Save the result of ToNumber to use as the old value.
    left = eax;
  }

  GenericBinaryOpStub stub(is_increment_ ? Token::ADD : Token::SUB,
                           NO_OVERWRITE,
                           NO_GENERIC_BINARY_FLAGS,
                           TypeInfo::Number());
  stub.GenerateCall(masm_, left, Smi::FromInt(1));

  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ pop(old_);
}


void CodeGenerator::VisitCountOperation(CountOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CountOperation");

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);

  // Postfix operations need a stack slot under the reference to hold
  // the old value while the new value is being stored. This is so that
  // in the case that storing the new value requires a call, the old
  // value will be in the frame to be spilled.
  if (is_postfix) frame_->Push(Smi::FromInt(0));

  // A constant reference is not saved to, so a constant reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!is_postfix) frame_->Push(Smi::FromInt(0));
      return;
    }
    target.TakeValue();

    Result new_value = frame_->Pop();
    new_value.ToRegister();

    Result old_value;  // Only allocated in the postfix case.
    if (is_postfix) {
      // Allocate a temporary to preserve the old value.
      old_value = allocator_->Allocate();
      ASSERT(old_value.is_valid());
      __ mov(old_value.reg(), new_value.reg());

      // The return value for postfix operations is ToNumber(input).
      // Keep more precise type info if the input is some kind of
      // number already. If the input is not a number we have to wait
      // for the deferred code to convert it.
      if (new_value.type_info().IsNumber()) {
        old_value.set_type_info(new_value.type_info());
      }
    }

    // Ensure the new value is writable.
    frame_->Spill(new_value.reg());

    Result tmp;
    if (new_value.is_smi()) {
      if (FLAG_debug_code) __ AbortIfNotSmi(new_value.reg());
    } else {
      // We don't know statically if the input is a smi.
      // In order to combine the overflow and the smi tag check, we need
      // to be able to allocate a byte register. We attempt to do so
      // without spilling. If we fail, we will generate separate overflow
      // and smi tag checks.
      // We allocate and clear a temporary byte register before performing
      // the count operation since clearing the register using xor will clear
      // the overflow flag.
      tmp = allocator_->AllocateByteRegisterWithoutSpilling();
      if (tmp.is_valid()) {
        __ Set(tmp.reg(), Immediate(0));
      }
    }

    if (is_increment) {
      __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    } else {
      __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
    }

    DeferredCode* deferred = NULL;
    if (is_postfix) {
      deferred = new DeferredPostfixCountOperation(new_value.reg(),
                                                   old_value.reg(),
                                                   is_increment,
                                                   new_value.type_info());
    } else {
      deferred = new DeferredPrefixCountOperation(new_value.reg(),
                                                  is_increment,
                                                  new_value.type_info());
    }

    if (new_value.is_smi()) {
      // In case we have a smi as input just check for overflow.
      deferred->Branch(overflow);
    } else {
      // If the count operation didn't overflow and the result is a valid
      // smi, we're done. Otherwise, we jump to the deferred slow-case
      // code.
      // We combine the overflow and the smi tag check if we could
      // successfully allocate a temporary byte register.
      if (tmp.is_valid()) {
        __ setcc(overflow, tmp.reg());
        __ or_(Operand(tmp.reg()), new_value.reg());
        __ test(tmp.reg(), Immediate(kSmiTagMask));
        tmp.Unuse();
        deferred->Branch(not_zero);
      } else {
        // Otherwise we test separately for overflow and smi tag.
        deferred->Branch(overflow);
        __ test(new_value.reg(), Immediate(kSmiTagMask));
        deferred->Branch(not_zero);
      }
    }
    deferred->BindExit();

    // Postfix count operations return their input converted to
    // number. The case when the input is already a number is covered
    // above in the allocation code for old_value.
    if (is_postfix && !new_value.type_info().IsNumber()) {
      old_value.set_type_info(TypeInfo::Number());
    }

    // The result of ++ or -- is an Integer32 if the
    // input is a smi. Otherwise it is a number.
    if (new_value.is_smi()) {
      new_value.set_type_info(TypeInfo::Integer32());
    } else {
      new_value.set_type_info(TypeInfo::Number());
    }

    // Postfix: store the old value in the allocated slot under the
    // reference.
    if (is_postfix) frame_->SetElementAt(target.size(), &old_value);

    frame_->Push(&new_value);
    // Non-constant: update the reference.
    if (!is_const) target.SetValue(NOT_CONST_INIT);
  }

  // Postfix: drop the new value and use the old.
  if (is_postfix) frame_->Drop();
}
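
// The combined overflow-and-smi check above works because both
// conditions land in bit 0: setcc writes the overflow flag as 0 or 1
// into the pre-cleared byte register, and the result's smi tag is also
// bit 0 (kSmiTagMask is 1). After or-ing in the result register, one
// test against kSmiTagMask reaches the deferred code if the inc/dec
// either overflowed or produced a non-smi. The temporary is cleared
// before the add/sub because clearing it afterwards (Set may emit xor)
// would wipe the very overflow flag being tested.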


void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
  Token::Value op = node->op();
  Comment cmnt(masm_, "[ Int32BinaryOperation");
  ASSERT(in_safe_int32_mode());
  ASSERT(safe_int32_mode_enabled());
  ASSERT(FLAG_safe_int32_compiler);

  if (op == Token::COMMA) {
    // Discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  ASSERT(right.is_untagged_int32());
  ASSERT(left.is_untagged_int32());
  // Registers containing an int32 value are not multiply used.
  ASSERT(!left.is_register() || !frame_->is_used(left.reg()));
  ASSERT(!right.is_register() || !frame_->is_used(right.reg()));

  switch (op) {
    case Token::COMMA:
    case Token::OR:
    case Token::AND:
      UNREACHABLE();
      break;
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
      if (left.is_constant() || right.is_constant()) {
        int32_t value;  // Put constant in value, non-constant in left.
        // Constants are known to be int32 values, from static analysis,
        // or else will be converted to int32 by implicit ECMA [[ToInt32]].
        if (left.is_constant()) {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        } else {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        }

        left.ToRegister();
        if (op == Token::BIT_OR) {
          __ or_(Operand(left.reg()), Immediate(value));
        } else if (op == Token::BIT_XOR) {
          __ xor_(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(Operand(left.reg()), Immediate(value));
        }
      } else {
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::BIT_OR) {
          __ or_(left.reg(), Operand(right.reg()));
        } else if (op == Token::BIT_XOR) {
          __ xor_(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::BIT_AND);
          __ and_(left.reg(), Operand(right.reg()));
        }
      }
      frame_->Push(&left);
      right.Unuse();
      break;
    case Token::SAR:
    case Token::SHL:
    case Token::SHR: {
      bool test_shr_overflow = false;
      left.ToRegister();
      if (right.is_constant()) {
        ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
        int shift_amount = NumberToInt32(*right.handle()) & 0x1F;
        if (op == Token::SAR) {
          __ sar(left.reg(), shift_amount);
        } else if (op == Token::SHL) {
          __ shl(left.reg(), shift_amount);
        } else {
          ASSERT(op == Token::SHR);
          __ shr(left.reg(), shift_amount);
          if (shift_amount == 0) test_shr_overflow = true;
        }
      } else {
        // Move right to ecx.
        if (left.is_register() && left.reg().is(ecx)) {
          right.ToRegister();
          __ xchg(left.reg(), right.reg());
          left = right;  // Left is unused here, copy of right unused by Push.
        } else {
          right.ToRegister(ecx);
          left.ToRegister();
        }
        if (op == Token::SAR) {
          __ sar_cl(left.reg());
        } else if (op == Token::SHL) {
          __ shl_cl(left.reg());
        } else {
          ASSERT(op == Token::SHR);
          __ shr_cl(left.reg());
          test_shr_overflow = true;
        }
      }
      {
        Register left_reg = left.reg();
        frame_->Push(&left);
        right.Unuse();
        if (test_shr_overflow && !node->to_int32()) {
          // Uint32 results with the top bit set are not Int32 values.
          // If they will be forced to Int32, skip the test.
          // The test is needed because shr with shift amount 0 does not
          // set the flags.
          __ test(left_reg, Operand(left_reg));
          unsafe_bailout_->Branch(sign);
        }
      }
      break;
    }
    case Token::ADD:
    case Token::SUB:
    case Token::MUL:
      if ((left.is_constant() && op != Token::SUB) || right.is_constant()) {
        int32_t value;  // Put constant in value, non-constant in left.
        if (right.is_constant()) {
          ASSERT(right.handle()->IsSmi() || right.handle()->IsHeapNumber());
          value = NumberToInt32(*right.handle());
        } else {
          ASSERT(left.handle()->IsSmi() || left.handle()->IsHeapNumber());
          value = NumberToInt32(*left.handle());
          left = right;
        }

        left.ToRegister();
        if (op == Token::ADD) {
          __ add(Operand(left.reg()), Immediate(value));
        } else if (op == Token::SUB) {
          __ sub(Operand(left.reg()), Immediate(value));
        } else {
          ASSERT(op == Token::MUL);
          __ imul(left.reg(), left.reg(), value);
        }
      } else {
        left.ToRegister();
        ASSERT(left.is_register());
        ASSERT(right.is_register());
        if (op == Token::ADD) {
          __ add(left.reg(), Operand(right.reg()));
        } else if (op == Token::SUB) {
          __ sub(left.reg(), Operand(right.reg()));
        } else {
          ASSERT(op == Token::MUL);
          // We have statically verified that a negative zero can be ignored.
          __ imul(left.reg(), Operand(right.reg()));
        }
      }
      right.Unuse();
      frame_->Push(&left);
      if (!node->to_int32()) {
        // If ToInt32 is called on the result of ADD, SUB, or MUL, we don't
        // care about overflows.
        unsafe_bailout_->Branch(overflow);
      }
      break;
    case Token::DIV:
    case Token::MOD: {
      if (right.is_register() && (right.reg().is(eax) || right.reg().is(edx))) {
        if (left.is_register() && left.reg().is(edi)) {
          right.ToRegister(ebx);
        } else {
          right.ToRegister(edi);
        }
      }
      left.ToRegister(eax);
      Result edx_reg = allocator_->Allocate(edx);
      right.ToRegister();
      // The results are unused here because BreakTarget::Branch cannot handle
      // live results.
      Register right_reg = right.reg();
      left.Unuse();
      right.Unuse();
      edx_reg.Unuse();
      __ cmp(right_reg, 0);
      // Ensure the divisor is positive: no chance of a non-int32 or -0 result.
      unsafe_bailout_->Branch(less_equal);
      __ cdq();  // Sign-extend eax into edx:eax.
      __ idiv(right_reg);
      if (op == Token::MOD) {
        // Negative zero can arise as a negative dividend with a zero result.
        if (!node->no_negative_zero()) {
          Label not_negative_zero;
          __ test(edx, Operand(edx));
          __ j(not_zero, &not_negative_zero);
          __ test(eax, Operand(eax));
          unsafe_bailout_->Branch(negative);
          __ bind(&not_negative_zero);
        }
        Result edx_result(edx, TypeInfo::Integer32());
        edx_result.set_untagged_int32(true);
        frame_->Push(&edx_result);
      } else {
        ASSERT(op == Token::DIV);
        __ test(edx, Operand(edx));
        unsafe_bailout_->Branch(not_equal);
        Result eax_result(eax, TypeInfo::Integer32());
        eax_result.set_untagged_int32(true);
        frame_->Push(&eax_result);
      }
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
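
// The DIV/MOD case above is pinned to fixed registers because idiv
// implicitly takes its 64-bit dividend in edx:eax (set up by cdq, which
// sign-extends eax) and leaves the quotient in eax and the remainder in
// edx; the shuffle at the top of the case only evicts the right operand
// from those registers. Bailing out on a divisor <= 0 rules out both
// division by zero and the INT_MIN / -1 overflow, DIV additionally
// bails on a non-zero remainder (non-int32 quotient), and MOD bails on
// a zero result from a negative dividend, which would have to be -0.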


void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
  // According to ECMA-262 section 11.11, page 58, the binary logical
  // operators must yield the result of one of the two expressions
  // before any ToBoolean() conversions. This means that the value
  // produced by a && or || operator is not necessarily a boolean.

  // NOTE: If the left hand side produces a materialized value (not
  // control flow), we force the right hand side to do the same. This
  // is necessary because we assume that if we get control flow on the
  // last path out of an expression we got it on all paths.
  if (node->op() == Token::AND) {
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_true;
    ControlDestination dest(&is_true, destination()->false_target(), true);
    LoadCondition(node->left(), &dest, false);

    if (dest.false_was_fall_through()) {
      // The current false target was used as the fall-through. If
      // there are no dangling jumps to is_true then the left
      // subexpression was unconditionally false. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_true.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current false target was a forward jump then we have a
        // valid frame, we have just bound the false target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->false_target()->Unuse();
          destination()->false_target()->Jump();
        }
        is_true.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have actually just jumped to or bound the current false
        // target but the current control destination is not marked as
        // used.
        destination()->Use(false);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_true
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_true
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&pop_and_continue, &exit, true);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }

  } else {
    ASSERT(node->op() == Token::OR);
    ASSERT(!in_safe_int32_mode());
    JumpTarget is_false;
    ControlDestination dest(destination()->true_target(), &is_false, false);
    LoadCondition(node->left(), &dest, false);

    if (dest.true_was_fall_through()) {
      // The current true target was used as the fall-through. If
      // there are no dangling jumps to is_false then the left
      // subexpression was unconditionally true. Otherwise we have
      // paths where we do have to evaluate the right subexpression.
      if (is_false.is_linked()) {
        // We need to compile the right subexpression. If the jump to
        // the current true target was a forward jump then we have a
        // valid frame, we have just bound the true target, and we
        // have to jump around the code for the right subexpression.
        if (has_valid_frame()) {
          destination()->true_target()->Unuse();
          destination()->true_target()->Jump();
        }
        is_false.Bind();
        // The left subexpression compiled to control flow, so the
        // right one is free to do so as well.
        LoadCondition(node->right(), destination(), false);
      } else {
        // We have just jumped to or bound the current true target but
        // the current control destination is not marked as used.
        destination()->Use(true);
      }

    } else if (dest.is_used()) {
      // The left subexpression compiled to control flow (and is_false
      // was just bound), so the right is free to do so as well.
      LoadCondition(node->right(), destination(), false);

    } else {
      // We have a materialized value on the frame, so we exit with
      // one on all paths. There are possibly also jumps to is_false
      // from nested subexpressions.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      //
      // Duplicate the TOS value. The duplicate will be popped by
      // ToBoolean.
      frame_->Dup();
      ControlDestination dest(&exit, &pop_and_continue, false);
      ToBoolean(&dest);

      // Pop the result of evaluating the first part.
      frame_->Drop();

      // Compile right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    }
  }
}
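
// In JavaScript terms, the code above implements the rule that && and
// || yield one of their operand values, not a boolean: 'a && b'
// evaluates to a when ToBoolean(a) is false and to b otherwise, so
// (0 && "x") == 0 and ("" || 42) == 42. That is why a materialized
// left value is duplicated before the ToBoolean test and only popped on
// the path that goes on to evaluate the right-hand side.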
Steve Blocka7e24c12009-10-30 11:49:00 +00008045
Steve Block6ded16b2010-05-10 14:33:55 +01008046
8047void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
8048 Comment cmnt(masm_, "[ BinaryOperation");
8049
8050 if (node->op() == Token::AND || node->op() == Token::OR) {
8051 GenerateLogicalBooleanOperation(node);
8052 } else if (in_safe_int32_mode()) {
8053 Visit(node->left());
8054 Visit(node->right());
8055 Int32BinaryOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00008056 } else {
8057 // NOTE: The code below assumes that the slow cases (calls to runtime)
8058 // never return a constant/immutable object.
8059 OverwriteMode overwrite_mode = NO_OVERWRITE;
8060 if (node->left()->AsBinaryOperation() != NULL &&
8061 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
8062 overwrite_mode = OVERWRITE_LEFT;
8063 } else if (node->right()->AsBinaryOperation() != NULL &&
8064 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
8065 overwrite_mode = OVERWRITE_RIGHT;
8066 }
8067
Steve Block6ded16b2010-05-10 14:33:55 +01008068 if (node->left()->IsTrivial()) {
8069 Load(node->right());
8070 Result right = frame_->Pop();
8071 frame_->Push(node->left());
8072 frame_->Push(&right);
8073 } else {
8074 Load(node->left());
8075 Load(node->right());
8076 }
8077 GenericBinaryOperation(node, overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00008078 }
8079}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
  ASSERT(!in_safe_int32_mode());
  frame_->PushFunction();
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
  ASSERT(!in_safe_int32_mode());
  Comment cmnt(masm_, "[ CompareOperation");

  bool left_already_loaded = false;

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();
  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Result answer = frame_->Pop();
    answer.ToRegister();

    if (check->Equals(Heap::number_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ cmp(answer.reg(), Factory::heap_number_map());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::string_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable string object.
      Result temp = allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ movzx_b(temp.reg(), FieldOperand(temp.reg(), Map::kBitFieldOffset));
      __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
      destination()->false_target()->Branch(not_zero);
      __ CmpObjectType(answer.reg(), FIRST_NONSTRING_TYPE, temp.reg());
      temp.Unuse();
      answer.Unuse();
      destination()->Split(below);

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ cmp(answer.reg(), Factory::true_value());
      destination()->true_target()->Branch(equal);
      __ cmp(answer.reg(), Factory::false_value());
      answer.Unuse();
      destination()->Split(equal);

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ cmp(answer.reg(), Factory::undefined_value());
      destination()->true_target()->Branch(equal);

      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);

      // It can be an undetectable object.
      frame_->Spill(answer.reg());
      __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ movzx_b(answer.reg(),
                 FieldOperand(answer.reg(), Map::kBitFieldOffset));
      __ test(answer.reg(), Immediate(1 << Map::kIsUndetectable));
      answer.Unuse();
      destination()->Split(not_zero);

    } else if (check->Equals(Heap::function_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      frame_->Spill(answer.reg());
      __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
      destination()->true_target()->Branch(equal);
      // Regular expressions are callable so typeof == 'function'.
      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
      answer.Unuse();
      destination()->Split(equal);
    } else if (check->Equals(Heap::object_symbol())) {
      __ test(answer.reg(), Immediate(kSmiTagMask));
      destination()->false_target()->Branch(zero);
      __ cmp(answer.reg(), Factory::null_value());
      destination()->true_target()->Branch(equal);

      Result map = allocator()->Allocate();
      ASSERT(map.is_valid());
      // Regular expressions are typeof == 'function', not 'object'.
      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
      destination()->false_target()->Branch(equal);

      // It can be an undetectable object.
      __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset));
      __ test(map.reg(), Immediate(1 << Map::kIsUndetectable));
      destination()->false_target()->Branch(not_zero);
      __ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
      __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
      destination()->false_target()->Branch(below);
      __ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
      answer.Unuse();
      map.Unuse();
      destination()->Split(below_equal);
    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      answer.Unuse();
      destination()->Goto(false);
    }
    return;
  } else if (op == Token::LT &&
             right->AsLiteral() != NULL &&
             right->AsLiteral()->handle()->IsHeapNumber()) {
    Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
    if (check->value() == 2147483648.0) {  // 0x80000000.
      Load(left);
      left_already_loaded = true;
      Result lhs = frame_->Pop();
      lhs.ToRegister();
      __ test(lhs.reg(), Immediate(kSmiTagMask));
      destination()->true_target()->Branch(zero);  // All smis are less.
      Result scratch = allocator()->Allocate();
      ASSERT(scratch.is_valid());
      __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
      __ cmp(scratch.reg(), Factory::heap_number_map());
      JumpTarget not_a_number;
      not_a_number.Branch(not_equal, &lhs);
      __ mov(scratch.reg(),
             FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
      __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
      not_a_number.Branch(above_equal, &lhs);  // It's a negative NaN or -Inf.
      const uint32_t borderline_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
      scratch.Unuse();
      lhs.Unuse();
      destination()->true_target()->Branch(less);
      destination()->false_target()->Jump();

      not_a_number.Bind(&lhs);
      frame_->Push(&lhs);
    }
  }

  Condition cc = no_condition;
  bool strict = false;
  switch (op) {
    case Token::EQ_STRICT:
      strict = true;
      // Fall through.
    case Token::EQ:
      cc = equal;
      break;
    case Token::LT:
      cc = less;
      break;
    case Token::GT:
      cc = greater;
      break;
    case Token::LTE:
      cc = less_equal;
      break;
    case Token::GTE:
      cc = greater_equal;
      break;
    case Token::IN: {
      if (!left_already_loaded) Load(left);
      Load(right);
      Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
      frame_->Push(&answer);  // push the result
      return;
    }
    case Token::INSTANCEOF: {
      if (!left_already_loaded) Load(left);
      Load(right);
      InstanceofStub stub;
      Result answer = frame_->CallStub(&stub, 2);
      answer.ToRegister();
      __ test(answer.reg(), Operand(answer.reg()));
      answer.Unuse();
      destination()->Split(zero);
      return;
    }
    default:
      UNREACHABLE();
  }

  if (left->IsTrivial()) {
    if (!left_already_loaded) {
      Load(right);
      Result right_result = frame_->Pop();
      frame_->Push(left);
      frame_->Push(&right_result);
    } else {
      Load(right);
    }
  } else {
    if (!left_already_loaded) Load(left);
    Load(right);
  }
  Comparison(node, cc, strict, destination());
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() {
  return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
      && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
      && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
      && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
      && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
}
#endif


// Emit a LoadIC call to get the value from receiver and leave it in
// dst.
class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  DeferredReferenceGetNamedValue(Register dst,
                                 Register receiver,
                                 Handle<String> name)
      : dst_(dst), receiver_(receiver), name_(name) {
    set_comment("[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;
  Register receiver_;
  Handle<String> name_;
};


void DeferredReferenceGetNamedValue::Generate() {
  if (!receiver_.is(eax)) {
    __ mov(eax, receiver_);
  }
  __ Set(ecx, Immediate(name_));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  __ call(ic, RelocInfo::CODE_TARGET);
  // The call must be followed by a test eax instruction to indicate
  // that the inobject property case was inlined.
  //
  // Store the delta to the map check instruction here in the test
  // instruction. Use masm_-> instead of the __ macro since the
  // latter can't return a value.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->test(eax, Immediate(-delta_to_patch_site));
  __ IncrementCounter(&Counters::named_load_inline_miss, 1);

  if (!dst_.is(eax)) __ mov(dst_, eax);
}
8351
8352
8353class DeferredReferenceGetKeyedValue: public DeferredCode {
8354 public:
8355 explicit DeferredReferenceGetKeyedValue(Register dst,
8356 Register receiver,
Andrei Popescu402d9372010-02-26 13:31:12 +00008357 Register key)
8358 : dst_(dst), receiver_(receiver), key_(key) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008359 set_comment("[ DeferredReferenceGetKeyedValue");
8360 }
8361
8362 virtual void Generate();
8363
8364 Label* patch_site() { return &patch_site_; }
8365
8366 private:
8367 Label patch_site_;
8368 Register dst_;
8369 Register receiver_;
8370 Register key_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008371};
8372
8373
8374void DeferredReferenceGetKeyedValue::Generate() {
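  // The KeyedLoadIC call below expects the receiver in edx and the key
  // in eax; the shuffle here covers every permutation of the incoming
  // registers, including the case where they are exactly swapped,
  // which the final xchg handles.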
Andrei Popescu402d9372010-02-26 13:31:12 +00008375 if (!receiver_.is(eax)) {
8376 // Register eax is available for key.
8377 if (!key_.is(eax)) {
8378 __ mov(eax, key_);
8379 }
8380 if (!receiver_.is(edx)) {
8381 __ mov(edx, receiver_);
8382 }
8383 } else if (!key_.is(edx)) {
8384 // Register edx is available for receiver.
8385 if (!receiver_.is(edx)) {
8386 __ mov(edx, receiver_);
8387 }
8388 if (!key_.is(eax)) {
8389 __ mov(eax, key_);
8390 }
8391 } else {
8392 __ xchg(edx, eax);
8393 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008394 // Calculate the delta from the IC call instruction to the map check
8395 // cmp instruction in the inlined version. This delta is stored in
8396 // a test(eax, delta) instruction after the call so that we can find
8397 // it in the IC initialization code and patch the cmp instruction.
8398 // This means that we cannot allow test instructions after calls to
8399 // KeyedLoadIC stubs in other places.
8400 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
Andrei Popescu402d9372010-02-26 13:31:12 +00008401 __ call(ic, RelocInfo::CODE_TARGET);
Steve Blocka7e24c12009-10-30 11:49:00 +00008402 // The delta from the start of the map-compare instruction to the
8403 // test instruction. We use masm_-> directly here instead of the __
8404 // macro because the macro can expand into code that cannot be
8405 // used as an expression (it yields no value). This is encountered
8406 // when doing generated code coverage tests.
8407 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
8408 // Here we use masm_-> instead of the __ macro because this is the
8409 // instruction that gets patched and coverage code gets in the way.
8410 masm_->test(eax, Immediate(-delta_to_patch_site));
8411 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
8412
8413 if (!dst_.is(eax)) __ mov(dst_, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00008414}
8415
8416
8417class DeferredReferenceSetKeyedValue: public DeferredCode {
8418 public:
8419 DeferredReferenceSetKeyedValue(Register value,
8420 Register key,
Steve Block6ded16b2010-05-10 14:33:55 +01008421 Register receiver,
8422 Register scratch)
8423 : value_(value),
8424 key_(key),
8425 receiver_(receiver),
8426 scratch_(scratch) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008427 set_comment("[ DeferredReferenceSetKeyedValue");
8428 }
8429
8430 virtual void Generate();
8431
8432 Label* patch_site() { return &patch_site_; }
8433
8434 private:
8435 Register value_;
8436 Register key_;
8437 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01008438 Register scratch_;
Steve Blocka7e24c12009-10-30 11:49:00 +00008439 Label patch_site_;
8440};
8441
8442
8443void DeferredReferenceSetKeyedValue::Generate() {
8444 __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
Steve Block6ded16b2010-05-10 14:33:55 +01008445 // Move value_ to eax, key_ to ecx, and receiver_ to edx.
8446 Register old_value = value_;
8447
8448 // First, move value to eax.
8449 if (!value_.is(eax)) {
8450 if (key_.is(eax)) {
8451 // Move key_ out of eax, preferably to ecx.
8452 if (!value_.is(ecx) && !receiver_.is(ecx)) {
8453 __ mov(ecx, key_);
8454 key_ = ecx;
8455 } else {
8456 __ mov(scratch_, key_);
8457 key_ = scratch_;
8458 }
8459 }
8460 if (receiver_.is(eax)) {
8461 // Move receiver_ out of eax, preferably to edx.
8462 if (!value_.is(edx) && !key_.is(edx)) {
8463 __ mov(edx, receiver_);
8464 receiver_ = edx;
8465 } else {
8466 // Both moves to scratch are from eax; no valid code path performs both.
8467 __ mov(scratch_, receiver_);
8468 receiver_ = scratch_;
8469 }
8470 }
8471 __ mov(eax, value_);
8472 value_ = eax;
8473 }
8474
8475 // Now value_ is in eax. Move the other two to the right positions.
8476 // We do not update the variables key_ and receiver_ to ecx and edx.
8477 if (key_.is(ecx)) {
8478 if (!receiver_.is(edx)) {
8479 __ mov(edx, receiver_);
8480 }
8481 } else if (key_.is(edx)) {
8482 if (receiver_.is(ecx)) {
8483 __ xchg(edx, ecx);
8484 } else {
8485 __ mov(ecx, key_);
8486 if (!receiver_.is(edx)) {
8487 __ mov(edx, receiver_);
8488 }
8489 }
8490 } else { // Key is not in edx or ecx.
8491 if (!receiver_.is(edx)) {
8492 __ mov(edx, receiver_);
8493 }
8494 __ mov(ecx, key_);
8495 }
8496
Steve Blocka7e24c12009-10-30 11:49:00 +00008497 // Call the IC stub.
8498 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
8499 __ call(ic, RelocInfo::CODE_TARGET);
8500 // The delta from the start of the map-compare instruction to the
8501 // test instruction. We use masm_-> directly here instead of the
8502 // __ macro because the macro can expand into code that cannot be
8503 // used as an expression (it yields no value). This is encountered
8504 // when doing generated code coverage tests.
8505 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
8506 // Here we use masm_-> instead of the __ macro because this is the
8507 // instruction that gets patched and coverage code gets in the way.
8508 masm_->test(eax, Immediate(-delta_to_patch_site));
Steve Block6ded16b2010-05-10 14:33:55 +01008509 // Restore value (returned from store IC) register.
8510 if (!old_value.is(eax)) __ mov(old_value, eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00008511}
8512
8513
Andrei Popescu402d9372010-02-26 13:31:12 +00008514Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
8515#ifdef DEBUG
8516 int original_height = frame()->height();
8517#endif
8518 Result result;
8519 // Do not inline the inobject property case for loads from the global
8520 // object. Also do not inline for unoptimized code. This saves time in
8521 // the code generator. Unoptimized code is toplevel code or code that is
8522 // not in a loop.
8523 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
8524 Comment cmnt(masm(), "[ Load from named Property");
8525 frame()->Push(name);
8526
8527 RelocInfo::Mode mode = is_contextual
8528 ? RelocInfo::CODE_TARGET_CONTEXT
8529 : RelocInfo::CODE_TARGET;
8530 result = frame()->CallLoadIC(mode);
8531 // A test eax instruction following the call signals that the inobject
8532 // property case was inlined. Ensure that there is not a test eax
8533 // instruction here.
8534 __ nop();
8535 } else {
8536 // Inline the inobject property case.
8537 Comment cmnt(masm(), "[ Inlined named property load");
8538 Result receiver = frame()->Pop();
8539 receiver.ToRegister();
8540
8541 result = allocator()->Allocate();
8542 ASSERT(result.is_valid());
8543 DeferredReferenceGetNamedValue* deferred =
8544 new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);
8545
8546 // Check that the receiver is a heap object.
8547 __ test(receiver.reg(), Immediate(kSmiTagMask));
8548 deferred->Branch(zero);
8549
8550 __ bind(deferred->patch_site());
8551 // This is the map check instruction that will be patched (so we can't
8552 // use the double underscore macro that may insert instructions).
8553 // Initially use an invalid map to force a failure.
8554 masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
8555 Immediate(Factory::null_value()));
8556 // This branch is always a forwards branch so it's always a fixed size
8557 // which allows the assert below to succeed and patching to work.
8558 deferred->Branch(not_equal);
8559
8560 // The delta from the patch label to the load offset must be statically
8561 // known.
8562 ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
8563 LoadIC::kOffsetToLoadInstruction);
8564 // The initial (invalid) offset has to be large enough to force a 32-bit
8565 // instruction encoding to allow patching with an arbitrary offset. Use
8566 // kMaxInt (minus kHeapObjectTag).
8567 int offset = kMaxInt;
8568 masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
8569
8570 __ IncrementCounter(&Counters::named_load_inline, 1);
8571 deferred->BindExit();
8572 }
8573 ASSERT(frame()->height() == original_height - 1);
8574 return result;
8575}
8576
8577
8578Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
8579#ifdef DEBUG
8580 int expected_height = frame()->height() - (is_contextual ? 1 : 2);
8581#endif
8582 Result result = frame()->CallStoreIC(name, is_contextual);
8583
8584 ASSERT_EQ(expected_height, frame()->height());
8585 return result;
8586}
8587
8588
8589Result CodeGenerator::EmitKeyedLoad() {
8590#ifdef DEBUG
8591 int original_height = frame()->height();
8592#endif
8593 Result result;
8594 // Inline array load code if inside a loop. We do not know the
8595 // receiver map yet, so we initially generate the code with a check
8596 // against an invalid map. In the inline cache code, we patch the map
8597 // check if appropriate.
Leon Clarked91b9f72010-01-27 17:25:45 +00008598 if (loop_nesting() > 0) {
8599 Comment cmnt(masm_, "[ Inlined load from keyed Property");
8600
Leon Clarked91b9f72010-01-27 17:25:45 +00008601 // Use a fresh temporary to load the elements without destroying
8602 // the receiver which is needed for the deferred slow case.
8603 Result elements = allocator()->Allocate();
8604 ASSERT(elements.is_valid());
8605
Leon Clarkef7060e22010-06-03 12:02:55 +01008606 Result key = frame_->Pop();
8607 Result receiver = frame_->Pop();
8608 key.ToRegister();
8609 receiver.ToRegister();
8610
Leon Clarked91b9f72010-01-27 17:25:45 +00008611 // Use a fresh temporary for the index and later the loaded
8612 // value.
Andrei Popescu402d9372010-02-26 13:31:12 +00008613 result = allocator()->Allocate();
8614 ASSERT(result.is_valid());
Leon Clarked91b9f72010-01-27 17:25:45 +00008615
8616 DeferredReferenceGetKeyedValue* deferred =
Andrei Popescu402d9372010-02-26 13:31:12 +00008617 new DeferredReferenceGetKeyedValue(result.reg(),
Leon Clarked91b9f72010-01-27 17:25:45 +00008618 receiver.reg(),
Andrei Popescu402d9372010-02-26 13:31:12 +00008619 key.reg());
Leon Clarked91b9f72010-01-27 17:25:45 +00008620
Andrei Popescu402d9372010-02-26 13:31:12 +00008621 __ test(receiver.reg(), Immediate(kSmiTagMask));
8622 deferred->Branch(zero);
Leon Clarked91b9f72010-01-27 17:25:45 +00008623
Leon Clarkef7060e22010-06-03 12:02:55 +01008624 // Check that the receiver has the expected map.
Leon Clarked91b9f72010-01-27 17:25:45 +00008625 // Initially, use an invalid map. The map is patched in the IC
8626 // initialization code.
8627 __ bind(deferred->patch_site());
8628 // Use masm-> here instead of the double underscore macro since extra
8629 // coverage code can interfere with the patching.
8630 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
8631 Immediate(Factory::null_value()));
8632 deferred->Branch(not_equal);
8633
8634 // Check that the key is a smi.
Steve Block6ded16b2010-05-10 14:33:55 +01008635 if (!key.is_smi()) {
8636 __ test(key.reg(), Immediate(kSmiTagMask));
8637 deferred->Branch(not_zero);
8638 } else {
8639 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
8640 }
Leon Clarked91b9f72010-01-27 17:25:45 +00008641
8642 // Get the elements array from the receiver and check that it
8643 // is not a dictionary.
8644 __ mov(elements.reg(),
8645 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
8646 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
8647 Immediate(Factory::fixed_array_map()));
8648 deferred->Branch(not_equal);
8649
8650 // Shift the key to get the actual index value and check that
Steve Block6ded16b2010-05-10 14:33:55 +01008651 // it is within bounds. Use unsigned comparison to handle negative keys.
Andrei Popescu402d9372010-02-26 13:31:12 +00008652 __ mov(result.reg(), key.reg());
8653 __ SmiUntag(result.reg());
8654 __ cmp(result.reg(),
Leon Clarked91b9f72010-01-27 17:25:45 +00008655 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
8656 deferred->Branch(above_equal);
8657
Andrei Popescu402d9372010-02-26 13:31:12 +00008658 __ mov(result.reg(), Operand(elements.reg(),
8659 result.reg(),
8660 times_4,
8661 FixedArray::kHeaderSize - kHeapObjectTag));
Leon Clarked91b9f72010-01-27 17:25:45 +00008662 elements.Unuse();
Andrei Popescu402d9372010-02-26 13:31:12 +00008663 __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
Leon Clarked91b9f72010-01-27 17:25:45 +00008664 deferred->Branch(equal);
8665 __ IncrementCounter(&Counters::keyed_load_inline, 1);
8666
8667 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00008668 } else {
8669 Comment cmnt(masm_, "[ Load from keyed Property");
Andrei Popescu402d9372010-02-26 13:31:12 +00008670 result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
Leon Clarked91b9f72010-01-27 17:25:45 +00008671 // Make sure that we do not have a test instruction after the
8672 // call. A test instruction after the call is used to
8673 // indicate that we have generated an inline version of the
8674 // keyed load. The explicit nop instruction is here because
8675 // the push that follows might be peep-hole optimized away.
8676 __ nop();
Leon Clarked91b9f72010-01-27 17:25:45 +00008677 }
Andrei Popescu402d9372010-02-26 13:31:12 +00008678 ASSERT(frame()->height() == original_height - 2);
8679 return result;
8680}
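// Shape of the inlined fast path emitted above (illustrative; the map
// immediate is patched by the IC code once a real map is seen):
//
//   test receiver, kSmiTagMask            ; miss if receiver is a smi
//   cmp [receiver+map], <invalid map>     ; patch site
//   test key, kSmiTagMask                 ; unless key is a known smi
//   cmp [elements+map], fixed_array_map   ; miss for dictionaries
//   mov result, key ; SmiUntag(result)    ; untagged index
//   cmp result, [elements+length]         ; unsigned bounds check
//   mov result, [elements+result*4+header]
//   cmp result, the_hole                  ; loads of the hole miss too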
8681
8682
8683Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
8684#ifdef DEBUG
8685 int original_height = frame()->height();
8686#endif
8687 Result result;
8688 // Generate inlined version of the keyed store if the code is in a loop
8689 // and the key is likely to be a smi.
8690 if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
8691 Comment cmnt(masm(), "[ Inlined store to keyed Property");
8692
8693 // Get the receiver, key and value into registers.
8694 result = frame()->Pop();
8695 Result key = frame()->Pop();
8696 Result receiver = frame()->Pop();
8697
8698 Result tmp = allocator_->Allocate();
8699 ASSERT(tmp.is_valid());
Steve Block6ded16b2010-05-10 14:33:55 +01008700 Result tmp2 = allocator_->Allocate();
8701 ASSERT(tmp2.is_valid());
Andrei Popescu402d9372010-02-26 13:31:12 +00008702
8703 // Determine whether the value is a constant before putting it in a
8704 // register.
8705 bool value_is_constant = result.is_constant();
8706
8707 // Make sure that value, key and receiver are in registers.
8708 result.ToRegister();
8709 key.ToRegister();
8710 receiver.ToRegister();
8711
8712 DeferredReferenceSetKeyedValue* deferred =
8713 new DeferredReferenceSetKeyedValue(result.reg(),
8714 key.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01008715 receiver.reg(),
8716 tmp.reg());
Andrei Popescu402d9372010-02-26 13:31:12 +00008717
8718 // Check that the receiver is not a smi.
8719 __ test(receiver.reg(), Immediate(kSmiTagMask));
8720 deferred->Branch(zero);
8721
Steve Block6ded16b2010-05-10 14:33:55 +01008722 // Check that the key is a smi.
8723 if (!key.is_smi()) {
8724 __ test(key.reg(), Immediate(kSmiTagMask));
8725 deferred->Branch(not_zero);
8726 } else {
8727 if (FLAG_debug_code) __ AbortIfNotSmi(key.reg());
8728 }
8729
Andrei Popescu402d9372010-02-26 13:31:12 +00008730 // Check that the receiver is a JSArray.
Steve Block6ded16b2010-05-10 14:33:55 +01008731 __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
Andrei Popescu402d9372010-02-26 13:31:12 +00008732 deferred->Branch(not_equal);
8733
8734 // Check that the key is within bounds. Both the key and the length of
Steve Block6ded16b2010-05-10 14:33:55 +01008735 // the JSArray are smis. Use unsigned comparison to handle negative keys.
Andrei Popescu402d9372010-02-26 13:31:12 +00008736 __ cmp(key.reg(),
8737 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01008738 deferred->Branch(above_equal);
Andrei Popescu402d9372010-02-26 13:31:12 +00008739
8740 // Get the elements array from the receiver and check that it is not a
8741 // dictionary.
8742 __ mov(tmp.reg(),
Steve Block6ded16b2010-05-10 14:33:55 +01008743 FieldOperand(receiver.reg(), JSArray::kElementsOffset));
8744
8745 // Check whether it is possible to omit the write barrier. If the elements
8746 // array is in new space or the value written is a smi we can safely update
8747 // the elements array without updating the remembered set.
8748 Label in_new_space;
8749 __ InNewSpace(tmp.reg(), tmp2.reg(), equal, &in_new_space);
8750 if (!value_is_constant) {
8751 __ test(result.reg(), Immediate(kSmiTagMask));
8752 deferred->Branch(not_zero);
8753 }
8754
8755 __ bind(&in_new_space);
Andrei Popescu402d9372010-02-26 13:31:12 +00008756 // Bind the deferred code patch site to be able to locate the fixed
8757 // array map comparison. When debugging, we patch this comparison to
8758 // always fail so that we will hit the IC call in the deferred code
8759 // which will allow the debugger to break for fast case stores.
8760 __ bind(deferred->patch_site());
8761 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
8762 Immediate(Factory::fixed_array_map()));
8763 deferred->Branch(not_equal);
8764
8765 // Store the value.
Kristian Monsen25f61362010-05-21 11:50:48 +01008766 __ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
Andrei Popescu402d9372010-02-26 13:31:12 +00008767 __ IncrementCounter(&Counters::keyed_store_inline, 1);
8768
8769 deferred->BindExit();
8770 } else {
8771 result = frame()->CallKeyedStoreIC();
8772 // Make sure that we do not have a test instruction after the
8773 // call. A test instruction after the call is used to
8774 // indicate that we have generated an inline version of the
8775 // keyed store.
8776 __ nop();
Andrei Popescu402d9372010-02-26 13:31:12 +00008777 }
8778 ASSERT(frame()->height() == original_height - 3);
8779 return result;
Leon Clarked91b9f72010-01-27 17:25:45 +00008780}
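// The inlined store above never emits a write barrier: either the
// elements array was just found to be in new space (stores into new
// space need no remembered-set update) or the value being stored is a
// smi, which is not a heap pointer at all.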
8781
8782
Steve Blocka7e24c12009-10-30 11:49:00 +00008783#undef __
8784#define __ ACCESS_MASM(masm)
8785
8786
Steve Block6ded16b2010-05-10 14:33:55 +01008787static void CheckTwoForSminess(MacroAssembler* masm,
8788 Register left, Register right, Register scratch,
8789 TypeInfo left_info, TypeInfo right_info,
8790 DeferredInlineBinaryOperation* deferred) {
8791 if (left.is(right)) {
8792 if (!left_info.IsSmi()) {
8793 __ test(left, Immediate(kSmiTagMask));
8794 deferred->Branch(not_zero);
8795 } else {
8796 if (FLAG_debug_code) __ AbortIfNotSmi(left);
8797 }
8798 } else if (!left_info.IsSmi()) {
8799 if (!right_info.IsSmi()) {
8800 __ mov(scratch, left);
8801 __ or_(scratch, Operand(right));
8802 __ test(scratch, Immediate(kSmiTagMask));
8803 deferred->Branch(not_zero);
8804 } else {
8805 __ test(left, Immediate(kSmiTagMask));
8806 deferred->Branch(not_zero);
8807 if (FLAG_debug_code) __ AbortIfNotSmi(right);
8808 }
8809 } else {
8810 if (FLAG_debug_code) __ AbortIfNotSmi(left);
8811 if (!right_info.IsSmi()) {
8812 __ test(right, Immediate(kSmiTagMask));
8813 deferred->Branch(not_zero);
8814 } else {
8815 if (FLAG_debug_code) __ AbortIfNotSmi(right);
8816 }
8817 }
8818}
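// Illustration of the combined check above (ia32 smi encoding: a smi
// is the value shifted left one bit, so kSmiTag == 0 and the low bit
// of a heap object pointer is 1):
//
//   left  = ...0  (smi)           left | right = ...1
//   right = ...1  (heap object)   test against kSmiTagMask != 0 -> miss
//
// A single or+test therefore rejects the case where either operand is
// a heap object, at the cost of one scratch register.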
8819
8820
Steve Blocka7e24c12009-10-30 11:49:00 +00008821Handle<String> Reference::GetName() {
8822 ASSERT(type_ == NAMED);
8823 Property* property = expression_->AsProperty();
8824 if (property == NULL) {
8825 // Global variable reference treated as a named property reference.
8826 VariableProxy* proxy = expression_->AsVariableProxy();
8827 ASSERT(proxy->AsVariable() != NULL);
8828 ASSERT(proxy->AsVariable()->is_global());
8829 return proxy->name();
8830 } else {
8831 Literal* raw_name = property->key()->AsLiteral();
8832 ASSERT(raw_name != NULL);
Andrei Popescu402d9372010-02-26 13:31:12 +00008833 return Handle<String>::cast(raw_name->handle());
Steve Blocka7e24c12009-10-30 11:49:00 +00008834 }
8835}
8836
8837
Steve Blockd0582a62009-12-15 09:54:21 +00008838void Reference::GetValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00008839 ASSERT(!cgen_->in_spilled_code());
8840 ASSERT(cgen_->HasValidEntryRegisters());
8841 ASSERT(!is_illegal());
8842 MacroAssembler* masm = cgen_->masm();
8843
8844 // Record the source position for the property load.
8845 Property* property = expression_->AsProperty();
8846 if (property != NULL) {
8847 cgen_->CodeForSourcePosition(property->position());
8848 }
8849
8850 switch (type_) {
8851 case SLOT: {
8852 Comment cmnt(masm, "[ Load from Slot");
8853 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
8854 ASSERT(slot != NULL);
Leon Clarkef7060e22010-06-03 12:02:55 +01008855 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Andrei Popescu402d9372010-02-26 13:31:12 +00008856 if (!persist_after_get_) set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00008857 break;
8858 }
8859
8860 case NAMED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00008861 Variable* var = expression_->AsVariableProxy()->AsVariable();
8862 bool is_global = var != NULL;
8863 ASSERT(!is_global || var->is_global());
Andrei Popescu402d9372010-02-26 13:31:12 +00008864 if (persist_after_get_) cgen_->frame()->Dup();
8865 Result result = cgen_->EmitNamedLoad(GetName(), is_global);
8866 if (!persist_after_get_) set_unloaded();
8867 cgen_->frame()->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00008868 break;
8869 }
8870
8871 case KEYED: {
Andrei Popescu402d9372010-02-26 13:31:12 +00008872 if (persist_after_get_) {
8873 cgen_->frame()->PushElementAt(1);
8874 cgen_->frame()->PushElementAt(1);
8875 }
8876 Result value = cgen_->EmitKeyedLoad();
Leon Clarked91b9f72010-01-27 17:25:45 +00008877 cgen_->frame()->Push(&value);
Andrei Popescu402d9372010-02-26 13:31:12 +00008878 if (!persist_after_get_) set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00008879 break;
8880 }
8881
8882 default:
8883 UNREACHABLE();
8884 }
8885}
8886
8887
Steve Blockd0582a62009-12-15 09:54:21 +00008888void Reference::TakeValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00008889 // For non-constant frame-allocated slots, we invalidate the value in the
8890 // slot. For all others, we fall back on GetValue.
8891 ASSERT(!cgen_->in_spilled_code());
8892 ASSERT(!is_illegal());
8893 if (type_ != SLOT) {
Steve Blockd0582a62009-12-15 09:54:21 +00008894 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00008895 return;
8896 }
8897
8898 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
8899 ASSERT(slot != NULL);
8900 if (slot->type() == Slot::LOOKUP ||
8901 slot->type() == Slot::CONTEXT ||
8902 slot->var()->mode() == Variable::CONST ||
8903 slot->is_arguments()) {
Steve Blockd0582a62009-12-15 09:54:21 +00008904 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00008905 return;
8906 }
8907
8908 // Only non-constant, frame-allocated parameters and locals can
8909 // reach here. Be careful not to use the optimizations for arguments
8910 // object access since it may not have been initialized yet.
8911 ASSERT(!slot->is_arguments());
8912 if (slot->type() == Slot::PARAMETER) {
8913 cgen_->frame()->TakeParameterAt(slot->index());
8914 } else {
8915 ASSERT(slot->type() == Slot::LOCAL);
8916 cgen_->frame()->TakeLocalAt(slot->index());
8917 }
Leon Clarked91b9f72010-01-27 17:25:45 +00008918
8919 ASSERT(persist_after_get_);
8920 // Do not unload the reference, because it is used in SetValue.
Steve Blocka7e24c12009-10-30 11:49:00 +00008921}
8922
8923
8924void Reference::SetValue(InitState init_state) {
8925 ASSERT(cgen_->HasValidEntryRegisters());
8926 ASSERT(!is_illegal());
8927 MacroAssembler* masm = cgen_->masm();
8928 switch (type_) {
8929 case SLOT: {
8930 Comment cmnt(masm, "[ Store to Slot");
8931 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
8932 ASSERT(slot != NULL);
8933 cgen_->StoreToSlot(slot, init_state);
Andrei Popescu402d9372010-02-26 13:31:12 +00008934 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00008935 break;
8936 }
8937
8938 case NAMED: {
8939 Comment cmnt(masm, "[ Store to named Property");
Andrei Popescu402d9372010-02-26 13:31:12 +00008940 Result answer = cgen_->EmitNamedStore(GetName(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00008941 cgen_->frame()->Push(&answer);
Leon Clarke4515c472010-02-03 11:58:03 +00008942 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00008943 break;
8944 }
8945
8946 case KEYED: {
8947 Comment cmnt(masm, "[ Store to keyed Property");
Steve Blocka7e24c12009-10-30 11:49:00 +00008948 Property* property = expression()->AsProperty();
8949 ASSERT(property != NULL);
Steve Block6ded16b2010-05-10 14:33:55 +01008950
Andrei Popescu402d9372010-02-26 13:31:12 +00008951 Result answer = cgen_->EmitKeyedStore(property->key()->type());
8952 cgen_->frame()->Push(&answer);
8953 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00008954 break;
8955 }
8956
Andrei Popescu402d9372010-02-26 13:31:12 +00008957 case UNLOADED:
8958 case ILLEGAL:
Steve Blocka7e24c12009-10-30 11:49:00 +00008959 UNREACHABLE();
8960 }
8961}
8962
8963
Leon Clarkee46be812010-01-19 14:06:41 +00008964void FastNewClosureStub::Generate(MacroAssembler* masm) {
Steve Block6ded16b2010-05-10 14:33:55 +01008965 // Create a new closure from the given function info in new
8966 // space. Set the context to the current context in esi.
Leon Clarkee46be812010-01-19 14:06:41 +00008967 Label gc;
8968 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
8969
Steve Block6ded16b2010-05-10 14:33:55 +01008970 // Get the function info from the stack.
Leon Clarkee46be812010-01-19 14:06:41 +00008971 __ mov(edx, Operand(esp, 1 * kPointerSize));
8972
8973 // Compute the function map in the current global context and set that
8974 // as the map of the allocated object.
8975 __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
8976 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
8977 __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
8978 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
8979
Steve Block6ded16b2010-05-10 14:33:55 +01008980 // Initialize the rest of the function. We don't have to update the
8981 // write barrier because the allocated object is in new space.
8982 __ mov(ebx, Immediate(Factory::empty_fixed_array()));
8983 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ebx);
8984 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
8985 __ mov(FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset),
8986 Immediate(Factory::the_hole_value()));
8987 __ mov(FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset), edx);
8988 __ mov(FieldOperand(eax, JSFunction::kContextOffset), esi);
8989 __ mov(FieldOperand(eax, JSFunction::kLiteralsOffset), ebx);
Leon Clarkee46be812010-01-19 14:06:41 +00008990
8991 // Return and remove the on-stack parameter.
8992 __ ret(1 * kPointerSize);
8993
8994 // Create a new closure through the slower runtime call.
8995 __ bind(&gc);
8996 __ pop(ecx); // Temporarily remove return address.
8997 __ pop(edx);
8998 __ push(esi);
8999 __ push(edx);
9000 __ push(ecx); // Restore return address.
Steve Block6ded16b2010-05-10 14:33:55 +01009001 __ TailCallRuntime(Runtime::kNewClosure, 2, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00009002}
9003
9004
9005void FastNewContextStub::Generate(MacroAssembler* masm) {
9006 // Try to allocate the context in new space.
9007 Label gc;
9008 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
9009 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
9010 eax, ebx, ecx, &gc, TAG_OBJECT);
9011
9012 // Get the function from the stack.
9013 __ mov(ecx, Operand(esp, 1 * kPointerSize));
9014
9015 // Setup the object header.
9016 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
9017 __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length));
9018
9019 // Setup the fixed slots.
9020 __ xor_(ebx, Operand(ebx)); // Set to NULL.
9021 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
9022 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
9023 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
9024 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
9025
9026 // Copy the global object from the surrounding context. We go through the
9027 // context in the function (ecx) to match the allocation behavior we have
9028 // in the runtime system (see Heap::AllocateFunctionContext).
9029 __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
9030 __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
9031 __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
9032
9033 // Initialize the rest of the slots to undefined.
9034 __ mov(ebx, Factory::undefined_value());
9035 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
9036 __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
9037 }
9038
9039 // Return and remove the on-stack parameter.
9040 __ mov(esi, Operand(eax));
9041 __ ret(1 * kPointerSize);
9042
9043 // Need to collect. Call into runtime system.
9044 __ bind(&gc);
Steve Block6ded16b2010-05-10 14:33:55 +01009045 __ TailCallRuntime(Runtime::kNewContext, 1, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00009046}
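// Resulting layout of the new context (length = slots_ +
// Context::MIN_CONTEXT_SLOTS; slot order as written above):
//   CLOSURE_INDEX   -> the function taken from the stack (ecx)
//   FCONTEXT_INDEX  -> the context itself (eax)
//   PREVIOUS_INDEX  -> NULL
//   EXTENSION_INDEX -> NULL
//   GLOBAL_INDEX    -> global object copied from the function's context
//   remaining slots_ entries -> undefined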
9047
9048
9049void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
Andrei Popescu402d9372010-02-26 13:31:12 +00009050 // Stack layout on entry:
9051 //
9052 // [esp + kPointerSize]: constant elements.
9053 // [esp + (2 * kPointerSize)]: literal index.
9054 // [esp + (3 * kPointerSize)]: literals array.
9055
9056 // All sizes here are multiples of kPointerSize.
Leon Clarkee46be812010-01-19 14:06:41 +00009057 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
9058 int size = JSArray::kSize + elements_size;
9059
9060 // Load boilerplate object into ecx and check if we need to create a
9061 // boilerplate.
9062 Label slow_case;
9063 __ mov(ecx, Operand(esp, 3 * kPointerSize));
9064 __ mov(eax, Operand(esp, 2 * kPointerSize));
9065 ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
Kristian Monsen25f61362010-05-21 11:50:48 +01009066 __ mov(ecx, CodeGenerator::FixedArrayElementOperand(ecx, eax));
Leon Clarkee46be812010-01-19 14:06:41 +00009067 __ cmp(ecx, Factory::undefined_value());
9068 __ j(equal, &slow_case);
9069
9070 // Allocate both the JS array and the elements array in one big
9071 // allocation. This avoids multiple limit checks.
9072 __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
9073
9074 // Copy the JS array part.
9075 for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
9076 if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
9077 __ mov(ebx, FieldOperand(ecx, i));
9078 __ mov(FieldOperand(eax, i), ebx);
9079 }
9080 }
9081
9082 if (length_ > 0) {
9083 // Get hold of the elements array of the boilerplate and setup the
9084 // elements pointer in the resulting object.
9085 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
9086 __ lea(edx, Operand(eax, JSArray::kSize));
9087 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
9088
9089 // Copy the elements array.
9090 for (int i = 0; i < elements_size; i += kPointerSize) {
9091 __ mov(ebx, FieldOperand(ecx, i));
9092 __ mov(FieldOperand(edx, i), ebx);
9093 }
9094 }
9095
9096 // Return and remove the on-stack parameters.
9097 __ ret(3 * kPointerSize);
9098
9099 __ bind(&slow_case);
Steve Block6ded16b2010-05-10 14:33:55 +01009100 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
Leon Clarkee46be812010-01-19 14:06:41 +00009101}
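// Both copy loops above are fully unrolled: JSArray::kSize is a
// compile-time constant and elements_size is fixed per stub instance,
// so the stub trades a little code size for a short, branch-free copy
// sequence.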
9102
9103
Steve Blocka7e24c12009-10-30 11:49:00 +00009104// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
9105void ToBooleanStub::Generate(MacroAssembler* masm) {
9106 Label false_result, true_result, not_string;
9107 __ mov(eax, Operand(esp, 1 * kPointerSize));
9108
9109 // 'null' => false.
9110 __ cmp(eax, Factory::null_value());
9111 __ j(equal, &false_result);
9112
9113 // Get the map and type of the heap object.
9114 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
9115 __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));
9116
9117 // Undetectable => false.
9118 __ movzx_b(ebx, FieldOperand(edx, Map::kBitFieldOffset));
9119 __ and_(ebx, 1 << Map::kIsUndetectable);
9120 __ j(not_zero, &false_result);
9121
9122 // JavaScript object => true.
9123 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
9124 __ j(above_equal, &true_result);
9125
9126 // String value => false iff empty.
9127 __ cmp(ecx, FIRST_NONSTRING_TYPE);
9128 __ j(above_equal, &not_string);
Steve Blocka7e24c12009-10-30 11:49:00 +00009129 __ mov(edx, FieldOperand(eax, String::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01009130 ASSERT(kSmiTag == 0);
Steve Blockd0582a62009-12-15 09:54:21 +00009131 __ test(edx, Operand(edx));
Steve Blocka7e24c12009-10-30 11:49:00 +00009132 __ j(zero, &false_result);
9133 __ jmp(&true_result);
9134
9135 __ bind(&not_string);
9136 // HeapNumber => false iff +0, -0, or NaN.
9137 __ cmp(edx, Factory::heap_number_map());
9138 __ j(not_equal, &true_result);
9139 __ fldz();
9140 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00009141 __ FCmp();
Steve Blocka7e24c12009-10-30 11:49:00 +00009142 __ j(zero, &false_result);
9143 // Fall through to |true_result|.
9144
9145 // Return 1/0 for true/false in eax.
9146 __ bind(&true_result);
9147 __ mov(eax, 1);
9148 __ ret(1 * kPointerSize);
9149 __ bind(&false_result);
9150 __ mov(eax, 0);
9151 __ ret(1 * kPointerSize);
9152}
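// Summary of the conversion implemented above (smis, booleans and
// undefined are handled inline by the caller and never reach the stub):
//   null                -> false
//   undetectable object -> false
//   JS object           -> true
//   string              -> true iff its length is non-zero
//   heap number         -> true iff its value is not +0, -0 or NaN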
9153
9154
Steve Block3ce2e202009-11-05 08:53:23 +00009155void GenericBinaryOpStub::GenerateCall(
9156 MacroAssembler* masm,
9157 Register left,
9158 Register right) {
9159 if (!ArgsInRegistersSupported()) {
9160 // Pass arguments on the stack.
9161 __ push(left);
9162 __ push(right);
9163 } else {
9164 // The calling convention with registers is left in edx and right in eax.
Steve Blockd0582a62009-12-15 09:54:21 +00009165 Register left_arg = edx;
9166 Register right_arg = eax;
9167 if (!(left.is(left_arg) && right.is(right_arg))) {
9168 if (left.is(right_arg) && right.is(left_arg)) {
Steve Block3ce2e202009-11-05 08:53:23 +00009169 if (IsOperationCommutative()) {
9170 SetArgsReversed();
9171 } else {
9172 __ xchg(left, right);
9173 }
Steve Blockd0582a62009-12-15 09:54:21 +00009174 } else if (left.is(left_arg)) {
9175 __ mov(right_arg, right);
Andrei Popescu402d9372010-02-26 13:31:12 +00009176 } else if (right.is(right_arg)) {
9177 __ mov(left_arg, left);
Steve Blockd0582a62009-12-15 09:54:21 +00009178 } else if (left.is(right_arg)) {
Steve Block3ce2e202009-11-05 08:53:23 +00009179 if (IsOperationCommutative()) {
Steve Blockd0582a62009-12-15 09:54:21 +00009180 __ mov(left_arg, right);
Steve Block3ce2e202009-11-05 08:53:23 +00009181 SetArgsReversed();
9182 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00009183 // Order of moves important to avoid destroying left argument.
9184 __ mov(left_arg, left);
9185 __ mov(right_arg, right);
Steve Block3ce2e202009-11-05 08:53:23 +00009186 }
Steve Blockd0582a62009-12-15 09:54:21 +00009187 } else if (right.is(left_arg)) {
Steve Block3ce2e202009-11-05 08:53:23 +00009188 if (IsOperationCommutative()) {
Steve Blockd0582a62009-12-15 09:54:21 +00009189 __ mov(right_arg, left);
Steve Block3ce2e202009-11-05 08:53:23 +00009190 SetArgsReversed();
9191 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00009192 // Order of moves important to avoid destroying right argument.
9193 __ mov(right_arg, right);
9194 __ mov(left_arg, left);
Steve Block3ce2e202009-11-05 08:53:23 +00009195 }
Steve Block3ce2e202009-11-05 08:53:23 +00009196 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00009197 // Order of moves is not important.
9198 __ mov(left_arg, left);
9199 __ mov(right_arg, right);
Steve Block3ce2e202009-11-05 08:53:23 +00009200 }
9201 }
9202
9203 // Update flags to indicate that arguments are in registers.
9204 SetArgsInRegisters();
Steve Blockd0582a62009-12-15 09:54:21 +00009205 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
Steve Block3ce2e202009-11-05 08:53:23 +00009206 }
9207
9208 // Call the stub.
9209 __ CallStub(this);
9210}
9211
9212
9213void GenericBinaryOpStub::GenerateCall(
9214 MacroAssembler* masm,
9215 Register left,
9216 Smi* right) {
9217 if (!ArgsInRegistersSupported()) {
9218 // Pass arguments on the stack.
9219 __ push(left);
9220 __ push(Immediate(right));
9221 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00009222 // The calling convention with registers is left in edx and right in eax.
9223 Register left_arg = edx;
9224 Register right_arg = eax;
9225 if (left.is(left_arg)) {
9226 __ mov(right_arg, Immediate(right));
9227 } else if (left.is(right_arg) && IsOperationCommutative()) {
9228 __ mov(left_arg, Immediate(right));
Steve Block3ce2e202009-11-05 08:53:23 +00009229 SetArgsReversed();
9230 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00009231 // For non-commutative operations, left and right_arg might be
9232 // the same register. Therefore, the order of the moves is
9233 // important here in order to not overwrite left before moving
9234 // it to left_arg.
Steve Blockd0582a62009-12-15 09:54:21 +00009235 __ mov(left_arg, left);
9236 __ mov(right_arg, Immediate(right));
Steve Block3ce2e202009-11-05 08:53:23 +00009237 }
9238
9239 // Update flags to indicate that arguments are in registers.
9240 SetArgsInRegisters();
Steve Blockd0582a62009-12-15 09:54:21 +00009241 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
Steve Block3ce2e202009-11-05 08:53:23 +00009242 }
9243
9244 // Call the stub.
9245 __ CallStub(this);
9246}
9247
9248
9249void GenericBinaryOpStub::GenerateCall(
9250 MacroAssembler* masm,
9251 Smi* left,
9252 Register right) {
9253 if (!ArgsInRegistersSupported()) {
9254 // Pass arguments on the stack.
9255 __ push(Immediate(left));
9256 __ push(right);
9257 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00009258 // The calling convention with registers is left in edx and right in eax.
9259 Register left_arg = edx;
9260 Register right_arg = eax;
9261 if (right.is(right_arg)) {
9262 __ mov(left_arg, Immediate(left));
9263 } else if (right.is(left_arg) && IsOperationCommutative()) {
9264 __ mov(right_arg, Immediate(left));
9265 SetArgsReversed();
Steve Block3ce2e202009-11-05 08:53:23 +00009266 } else {
Andrei Popescu402d9372010-02-26 13:31:12 +00009267 // For non-commutative operations, right and left_arg might be
9268 // the same register. Therefore, the order of the moves is
9269 // important here in order to not overwrite right before moving
9270 // it to right_arg.
Steve Blockd0582a62009-12-15 09:54:21 +00009271 __ mov(right_arg, right);
Andrei Popescu402d9372010-02-26 13:31:12 +00009272 __ mov(left_arg, Immediate(left));
Steve Block3ce2e202009-11-05 08:53:23 +00009273 }
9274 // Update flags to indicate that arguments are in registers.
9275 SetArgsInRegisters();
Steve Blockd0582a62009-12-15 09:54:21 +00009276 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
Steve Block3ce2e202009-11-05 08:53:23 +00009277 }
9278
9279 // Call the stub.
9280 __ CallStub(this);
9281}
9282
9283
Leon Clarked91b9f72010-01-27 17:25:45 +00009284Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm,
9285 VirtualFrame* frame,
9286 Result* left,
9287 Result* right) {
9288 if (ArgsInRegistersSupported()) {
9289 SetArgsInRegisters();
9290 return frame->CallStub(this, left, right);
9291 } else {
9292 frame->Push(left);
9293 frame->Push(right);
9294 return frame->CallStub(this, 2);
9295 }
9296}
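// The GenerateCall overloads above all implement the same contract:
// when ArgsInRegistersSupported(), left travels in edx and right in
// eax (SetArgsReversed() spares commutative operators a swap);
// otherwise both arguments are passed on the stack.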
9297
9298
Steve Blocka7e24c12009-10-30 11:49:00 +00009299void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
Leon Clarked91b9f72010-01-27 17:25:45 +00009300 // 1. Move arguments into edx, eax except for DIV and MOD, which need the
9301 // dividend in eax and edx free for the division. Use eax, ebx for those.
9302 Comment load_comment(masm, "-- Load arguments");
9303 Register left = edx;
9304 Register right = eax;
9305 if (op_ == Token::DIV || op_ == Token::MOD) {
9306 left = eax;
9307 right = ebx;
9308 if (HasArgsInRegisters()) {
9309 __ mov(ebx, eax);
9310 __ mov(eax, edx);
9311 }
9312 }
9313 if (!HasArgsInRegisters()) {
9314 __ mov(right, Operand(esp, 1 * kPointerSize));
9315 __ mov(left, Operand(esp, 2 * kPointerSize));
9316 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009317
Steve Block6ded16b2010-05-10 14:33:55 +01009318 if (static_operands_type_.IsSmi()) {
9319 if (FLAG_debug_code) {
9320 __ AbortIfNotSmi(left);
9321 __ AbortIfNotSmi(right);
9322 }
9323 if (op_ == Token::BIT_OR) {
9324 __ or_(right, Operand(left));
9325 GenerateReturn(masm);
9326 return;
9327 } else if (op_ == Token::BIT_AND) {
9328 __ and_(right, Operand(left));
9329 GenerateReturn(masm);
9330 return;
9331 } else if (op_ == Token::BIT_XOR) {
9332 __ xor_(right, Operand(left));
9333 GenerateReturn(masm);
9334 return;
9335 }
9336 }
9337
Leon Clarked91b9f72010-01-27 17:25:45 +00009338 // 2. Prepare the smi check of both operands by OR-ing them together.
9339 Comment smi_check_comment(masm, "-- Smi check arguments");
9340 Label not_smis;
9341 Register combined = ecx;
9342 ASSERT(!left.is(combined) && !right.is(combined));
Steve Blocka7e24c12009-10-30 11:49:00 +00009343 switch (op_) {
Leon Clarked91b9f72010-01-27 17:25:45 +00009344 case Token::BIT_OR:
9345 // Perform the operation into eax and smi check the result. Preserve
9346 // eax in case the result is not a smi.
9347 ASSERT(!left.is(ecx) && !right.is(ecx));
9348 __ mov(ecx, right);
9349 __ or_(right, Operand(left)); // Bitwise or is commutative.
9350 combined = right;
9351 break;
9352
9353 case Token::BIT_XOR:
9354 case Token::BIT_AND:
Leon Clarkeeab96aa2010-01-27 16:31:12 +00009355 case Token::ADD:
Steve Blocka7e24c12009-10-30 11:49:00 +00009356 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00009357 case Token::MUL:
Steve Blocka7e24c12009-10-30 11:49:00 +00009358 case Token::DIV:
9359 case Token::MOD:
Leon Clarked91b9f72010-01-27 17:25:45 +00009360 __ mov(combined, right);
9361 __ or_(combined, Operand(left));
9362 break;
9363
9364 case Token::SHL:
9365 case Token::SAR:
9366 case Token::SHR:
9367 // Move the right operand into ecx for the shift operation, use eax
9368 // for the smi check register.
9369 ASSERT(!left.is(ecx) && !right.is(ecx));
9370 __ mov(ecx, right);
9371 __ or_(right, Operand(left));
9372 combined = right;
Steve Blocka7e24c12009-10-30 11:49:00 +00009373 break;
9374
9375 default:
Steve Blocka7e24c12009-10-30 11:49:00 +00009376 break;
9377 }
9378
Leon Clarked91b9f72010-01-27 17:25:45 +00009379 // 3. Perform the smi check of the operands.
9380 ASSERT(kSmiTag == 0); // Adjust zero check if not the case.
9381 __ test(combined, Immediate(kSmiTagMask));
9382 __ j(not_zero, &not_smis, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00009383
Leon Clarked91b9f72010-01-27 17:25:45 +00009384 // 4. Operands are both smis, perform the operation leaving the result in
9385 // eax and check the result if necessary.
9386 Comment perform_smi(masm, "-- Perform smi operation");
9387 Label use_fp_on_smis;
Steve Blocka7e24c12009-10-30 11:49:00 +00009388 switch (op_) {
Leon Clarked91b9f72010-01-27 17:25:45 +00009389 case Token::BIT_OR:
9390 // Nothing to do.
9391 break;
9392
9393 case Token::BIT_XOR:
9394 ASSERT(right.is(eax));
9395 __ xor_(right, Operand(left)); // Bitwise xor is commutative.
9396 break;
9397
9398 case Token::BIT_AND:
9399 ASSERT(right.is(eax));
9400 __ and_(right, Operand(left)); // Bitwise and is commutative.
9401 break;
9402
9403 case Token::SHL:
9404 // Remove tags from operands (but keep sign).
9405 __ SmiUntag(left);
9406 __ SmiUntag(ecx);
9407 // Perform the operation.
9408 __ shl_cl(left);
9409 // Check that the *signed* result fits in a smi.
9410 __ cmp(left, 0xc0000000);
9411 __ j(sign, &use_fp_on_smis, not_taken);
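        // The single sign test above is exact: subtracting 0xc0000000
        // sets the sign flag precisely for results in
        // [0x40000000, 0xbfffffff], i.e. for every value outside the
        // smi range [-0x40000000, 0x3fffffff].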
9412 // Tag the result and store it in register eax.
9413 __ SmiTag(left);
9414 __ mov(eax, left);
9415 break;
9416
9417 case Token::SAR:
9418 // Remove tags from operands (but keep sign).
9419 __ SmiUntag(left);
9420 __ SmiUntag(ecx);
9421 // Perform the operation.
9422 __ sar_cl(left);
9423 // Tag the result and store it in register eax.
9424 __ SmiTag(left);
9425 __ mov(eax, left);
9426 break;
9427
9428 case Token::SHR:
9429 // Remove tags from operands (but keep sign).
9430 __ SmiUntag(left);
9431 __ SmiUntag(ecx);
9432 // Perform the operation.
9433 __ shr_cl(left);
9434 // Check that the *unsigned* result fits in a smi.
9435 // Neither of the two high-order bits can be set:
9436 // - 0x80000000: high bit would be lost when smi tagging.
9437 // - 0x40000000: this number would convert to negative when
9438 // smi tagging. These two cases can only happen with shifts
9439 // by 0 or 1 when handed a valid smi.
9440 __ test(left, Immediate(0xc0000000));
9441 __ j(not_zero, slow, not_taken);
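        // Example of why only shifts by 0 or 1 can fail here: in JS,
        // -1 >>> 0 == 0xffffffff and -1 >>> 1 == 0x7fffffff, each with
        // one of the two top bits set, while any shift by two or more
        // clears both high-order bits of the 32-bit value.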
9442 // Tag the result and store it in register eax.
9443 __ SmiTag(left);
9444 __ mov(eax, left);
9445 break;
9446
Steve Blocka7e24c12009-10-30 11:49:00 +00009447 case Token::ADD:
Leon Clarked91b9f72010-01-27 17:25:45 +00009448 ASSERT(right.is(eax));
9449 __ add(right, Operand(left)); // Addition is commutative.
9450 __ j(overflow, &use_fp_on_smis, not_taken);
9451 break;
9452
Steve Blocka7e24c12009-10-30 11:49:00 +00009453 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00009454 __ sub(left, Operand(right));
9455 __ j(overflow, &use_fp_on_smis, not_taken);
9456 __ mov(eax, left);
Steve Blocka7e24c12009-10-30 11:49:00 +00009457 break;
9458
9459 case Token::MUL:
9460 // If the smi tag is 0 we can just leave the tag on one operand.
Leon Clarked91b9f72010-01-27 17:25:45 +00009461 ASSERT(kSmiTag == 0); // Adjust code below if not the case.
9462 // We can't revert the multiplication if the result is not a smi
9463 // so save the right operand.
9464 __ mov(ebx, right);
Steve Blocka7e24c12009-10-30 11:49:00 +00009465 // Remove tag from one of the operands (but keep sign).
Leon Clarked91b9f72010-01-27 17:25:45 +00009466 __ SmiUntag(right);
Steve Blocka7e24c12009-10-30 11:49:00 +00009467 // Do multiplication.
Leon Clarked91b9f72010-01-27 17:25:45 +00009468 __ imul(right, Operand(left)); // Multiplication is commutative.
9469 __ j(overflow, &use_fp_on_smis, not_taken);
9470 // Check for negative zero result. Use combined = left | right.
9471 __ NegativeZeroTest(right, combined, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00009472 break;
9473
9474 case Token::DIV:
Leon Clarked91b9f72010-01-27 17:25:45 +00009475 // We can't revert the division if the result is not a smi so
9476 // save the left operand.
9477 __ mov(edi, left);
9478 // Check for 0 divisor.
9479 __ test(right, Operand(right));
9480 __ j(zero, &use_fp_on_smis, not_taken);
9481 // Sign extend left into edx:eax.
9482 ASSERT(left.is(eax));
9483 __ cdq();
9484 // Divide edx:eax by right.
9485 __ idiv(right);
9486 // Check for the corner case of dividing the most negative smi by
9487 // -1. We cannot use the overflow flag, since it is not set by the idiv
9488 // instruction.
Steve Blocka7e24c12009-10-30 11:49:00 +00009489 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
9490 __ cmp(eax, 0x40000000);
Leon Clarked91b9f72010-01-27 17:25:45 +00009491 __ j(equal, &use_fp_on_smis);
9492 // Check for negative zero result. Use combined = left | right.
9493 __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00009494 // Check that the remainder is zero.
9495 __ test(edx, Operand(edx));
Leon Clarked91b9f72010-01-27 17:25:45 +00009496 __ j(not_zero, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00009497 // Tag the result and store it in register eax.
Leon Clarkee46be812010-01-19 14:06:41 +00009498 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00009499 break;
9500
9501 case Token::MOD:
Leon Clarked91b9f72010-01-27 17:25:45 +00009502 // Check for 0 divisor.
9503 __ test(right, Operand(right));
9504 __ j(zero, &not_smis, not_taken);
9505
9506 // Sign extend left into edx:eax.
9507 ASSERT(left.is(eax));
9508 __ cdq();
9509 // Divide edx:eax by right.
9510 __ idiv(right);
9511 // Check for negative zero result. Use combined = left | right.
9512 __ NegativeZeroTest(edx, combined, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00009513 // Move remainder to register eax.
Leon Clarked91b9f72010-01-27 17:25:45 +00009514 __ mov(eax, edx);
Steve Blocka7e24c12009-10-30 11:49:00 +00009515 break;
9516
9517 default:
9518 UNREACHABLE();
Leon Clarked91b9f72010-01-27 17:25:45 +00009519 }
9520
9521 // 5. Emit return of result in eax.
9522 GenerateReturn(masm);
9523
9524 // 6. For some operations emit inline code to perform floating point
9525 // operations on known smis (e.g., if the result of the operation
9526 // overflowed the smi range).
9527 switch (op_) {
9528 case Token::SHL: {
9529 Comment perform_float(masm, "-- Perform float operation on smis");
9530 __ bind(&use_fp_on_smis);
9531 // Result we want is in left == edx, so we can put the allocated heap
9532 // number in eax.
9533 __ AllocateHeapNumber(eax, ecx, ebx, slow);
9534 // Store the result in the HeapNumber and return.
9535 if (CpuFeatures::IsSupported(SSE2)) {
9536 CpuFeatures::Scope use_sse2(SSE2);
9537 __ cvtsi2sd(xmm0, Operand(left));
9538 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
9539 } else {
9540 // It's OK to overwrite the right argument on the stack because we
9541 // are about to return.
9542 __ mov(Operand(esp, 1 * kPointerSize), left);
9543 __ fild_s(Operand(esp, 1 * kPointerSize));
9544 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
9545 }
9546 GenerateReturn(masm);
9547 break;
9548 }
9549
9550 case Token::ADD:
9551 case Token::SUB:
9552 case Token::MUL:
9553 case Token::DIV: {
9554 Comment perform_float(masm, "-- Perform float operation on smis");
9555 __ bind(&use_fp_on_smis);
9556 // Restore arguments to edx, eax.
9557 switch (op_) {
9558 case Token::ADD:
9559 // Revert right = right + left.
9560 __ sub(right, Operand(left));
9561 break;
9562 case Token::SUB:
9563 // Revert left = left - right.
9564 __ add(left, Operand(right));
9565 break;
9566 case Token::MUL:
9567 // Right was clobbered but a copy is in ebx.
9568 __ mov(right, ebx);
9569 break;
9570 case Token::DIV:
9571 // Left was clobbered but a copy is in edi. Right is in ebx for
9572 // division.
9573 __ mov(edx, edi);
9574 __ mov(eax, right);
9575 break;
9576 default: UNREACHABLE();
9577 break;
9578 }
9579 __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
9580 if (CpuFeatures::IsSupported(SSE2)) {
9581 CpuFeatures::Scope use_sse2(SSE2);
9582 FloatingPointHelper::LoadSSE2Smis(masm, ebx);
9583 switch (op_) {
9584 case Token::ADD: __ addsd(xmm0, xmm1); break;
9585 case Token::SUB: __ subsd(xmm0, xmm1); break;
9586 case Token::MUL: __ mulsd(xmm0, xmm1); break;
9587 case Token::DIV: __ divsd(xmm0, xmm1); break;
9588 default: UNREACHABLE();
9589 }
9590 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
9591 } else { // SSE2 not available, use FPU.
9592 FloatingPointHelper::LoadFloatSmis(masm, ebx);
9593 switch (op_) {
9594 case Token::ADD: __ faddp(1); break;
9595 case Token::SUB: __ fsubp(1); break;
9596 case Token::MUL: __ fmulp(1); break;
9597 case Token::DIV: __ fdivp(1); break;
9598 default: UNREACHABLE();
9599 }
9600 __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
9601 }
9602 __ mov(eax, ecx);
9603 GenerateReturn(masm);
9604 break;
9605 }
9606
9607 default:
9608 break;
9609 }
9610
9611 // 7. Non-smi operands, fall out to the non-smi code with the operands in
9612 // edx and eax.
9613 Comment done_comment(masm, "-- Enter non-smi code");
9614 __ bind(&not_smis);
9615 switch (op_) {
9616 case Token::BIT_OR:
9617 case Token::SHL:
9618 case Token::SAR:
9619 case Token::SHR:
9620 // Right operand is saved in ecx and eax was destroyed by the smi
9621 // check.
9622 __ mov(eax, ecx);
9623 break;
9624
9625 case Token::DIV:
9626 case Token::MOD:
9627 // Operands are in eax, ebx at this point.
9628 __ mov(edx, eax);
9629 __ mov(eax, ebx);
9630 break;
9631
9632 default:
Steve Blocka7e24c12009-10-30 11:49:00 +00009633 break;
9634 }
9635}
9636
9637
9638void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
9639 Label call_runtime;
9640
Steve Block3ce2e202009-11-05 08:53:23 +00009641 __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00009642
Steve Block3ce2e202009-11-05 08:53:23 +00009643 // Generate fast case smi code if requested, i.e. when the caller has not
9644 // already generated it. Generating it here speeds
9645 // up common operations.
Steve Block6ded16b2010-05-10 14:33:55 +01009646 if (ShouldGenerateSmiCode()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00009647 GenerateSmiCode(masm, &call_runtime);
9648 } else if (op_ != Token::MOD) { // MOD goes straight to runtime.
Steve Block6ded16b2010-05-10 14:33:55 +01009649 if (!HasArgsInRegisters()) {
9650 GenerateLoadArguments(masm);
9651 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009652 }
9653
Steve Blocka7e24c12009-10-30 11:49:00 +00009654 // Floating point case.
Steve Block6ded16b2010-05-10 14:33:55 +01009655 if (ShouldGenerateFPCode()) {
9656 switch (op_) {
9657 case Token::ADD:
9658 case Token::SUB:
9659 case Token::MUL:
9660 case Token::DIV: {
9661 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
9662 HasSmiCodeInStub()) {
9663 // Execution reaches this point when the first non-smi argument occurs
9664 // (and only if smi code is generated). This is the right moment to
9665 // patch to HEAP_NUMBERS state. The transition is attempted only for
9666 // the four basic operations. The stub stays in the DEFAULT state
9667 // forever for all other operations (also if smi code is skipped).
9668 GenerateTypeTransition(masm);
Andrei Popescu402d9372010-02-26 13:31:12 +00009669 }
Steve Blocka7e24c12009-10-30 11:49:00 +00009670
Steve Block6ded16b2010-05-10 14:33:55 +01009671 Label not_floats;
Leon Clarkee46be812010-01-19 14:06:41 +00009672 if (CpuFeatures::IsSupported(SSE2)) {
9673 CpuFeatures::Scope use_sse2(SSE2);
Steve Block6ded16b2010-05-10 14:33:55 +01009674 if (static_operands_type_.IsNumber()) {
9675 if (FLAG_debug_code) {
9676 // Assert at runtime that inputs are only numbers.
9677 __ AbortIfNotNumber(edx);
9678 __ AbortIfNotNumber(eax);
9679 }
9680 if (static_operands_type_.IsSmi()) {
9681 if (FLAG_debug_code) {
9682 __ AbortIfNotSmi(edx);
9683 __ AbortIfNotSmi(eax);
9684 }
9685 FloatingPointHelper::LoadSSE2Smis(masm, ecx);
9686 } else {
9687 FloatingPointHelper::LoadSSE2Operands(masm);
9688 }
9689 } else {
9690 FloatingPointHelper::LoadSSE2Operands(masm, &call_runtime);
9691 }
9692
9693 switch (op_) {
9694 case Token::ADD: __ addsd(xmm0, xmm1); break;
9695 case Token::SUB: __ subsd(xmm0, xmm1); break;
9696 case Token::MUL: __ mulsd(xmm0, xmm1); break;
9697 case Token::DIV: __ divsd(xmm0, xmm1); break;
9698 default: UNREACHABLE();
9699 }
9700 GenerateHeapResultAllocation(masm, &call_runtime);
Leon Clarkee46be812010-01-19 14:06:41 +00009701 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
Steve Block6ded16b2010-05-10 14:33:55 +01009702 GenerateReturn(masm);
9703 } else { // SSE2 not available, use FPU.
9704 if (static_operands_type_.IsNumber()) {
9705 if (FLAG_debug_code) {
9706 // Assert at runtime that inputs are only numbers.
9707 __ AbortIfNotNumber(edx);
9708 __ AbortIfNotNumber(eax);
9709 }
9710 } else {
9711 FloatingPointHelper::CheckFloatOperands(masm, &call_runtime, ebx);
9712 }
9713 FloatingPointHelper::LoadFloatOperands(
9714 masm,
9715 ecx,
9716 FloatingPointHelper::ARGS_IN_REGISTERS);
9717 switch (op_) {
9718 case Token::ADD: __ faddp(1); break;
9719 case Token::SUB: __ fsubp(1); break;
9720 case Token::MUL: __ fmulp(1); break;
9721 case Token::DIV: __ fdivp(1); break;
9722 default: UNREACHABLE();
9723 }
9724 Label after_alloc_failure;
9725 GenerateHeapResultAllocation(masm, &after_alloc_failure);
Leon Clarkee46be812010-01-19 14:06:41 +00009726 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01009727 GenerateReturn(masm);
9728 __ bind(&after_alloc_failure);
9729 __ ffree();
9730 __ jmp(&call_runtime);
Leon Clarkee46be812010-01-19 14:06:41 +00009731 }
Steve Block6ded16b2010-05-10 14:33:55 +01009732 __ bind(&not_floats);
9733 if (runtime_operands_type_ == BinaryOpIC::DEFAULT &&
9734 !HasSmiCodeInStub()) {
9735 // Execution reaches this point when the first non-number argument
9736 // occurs (and only if smi code is skipped from the stub, otherwise
9737 // the patching has already been done earlier in this case branch).
9738 // Try patching to STRINGS for ADD operation.
9739 if (op_ == Token::ADD) {
9740 GenerateTypeTransition(masm);
9741 }
9742 }
9743 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00009744 }
      case Token::MOD: {
        // For MOD we go directly to runtime in the non-smi case.
        break;
      }
      case Token::BIT_OR:
      case Token::BIT_AND:
      case Token::BIT_XOR:
      case Token::SAR:
      case Token::SHL:
      case Token::SHR: {
        Label non_smi_result;
        FloatingPointHelper::LoadAsIntegers(masm,
                                            static_operands_type_,
                                            use_sse3_,
                                            &call_runtime);
        switch (op_) {
          case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
          case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
          case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
          case Token::SAR: __ sar_cl(eax); break;
          case Token::SHL: __ shl_cl(eax); break;
          case Token::SHR: __ shr_cl(eax); break;
          default: UNREACHABLE();
        }
        if (op_ == Token::SHR) {
          // Check if result is non-negative and fits in a smi.
          __ test(eax, Immediate(0xc0000000));
          __ j(not_zero, &call_runtime);
        } else {
          // Check if result fits in a smi.
          __ cmp(eax, 0xc0000000);
          __ j(negative, &non_smi_result);
        }
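        // A note on 0xc0000000: an ia32 smi is a 31-bit signed integer
        // tagged by one left shift, so a 32-bit result fits iff its top two
        // bits agree. A plain C++ sketch of the rule (hypothetical helper,
        // not part of this file):
        //   static bool FitsInSmi(int32_t value) {
        //     return value == ((value << 1) >> 1);  // kSmiTagSize == 1.
        //   }
        // For SHR the result is unsigned, so both top bits must be zero.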
        // Tag smi result and return.
        __ SmiTag(eax);
        GenerateReturn(masm);

        // All ops except SHR return a signed int32 that we load in
        // a HeapNumber.
        if (op_ != Token::SHR) {
          __ bind(&non_smi_result);
          // Allocate a heap number if needed.
          __ mov(ebx, Operand(eax));  // ebx: result
          Label skip_allocation;
          switch (mode_) {
            case OVERWRITE_LEFT:
            case OVERWRITE_RIGHT:
              // If the operand was an object, we skip the
              // allocation of a heap number.
              __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
                                  1 * kPointerSize : 2 * kPointerSize));
              __ test(eax, Immediate(kSmiTagMask));
              __ j(not_zero, &skip_allocation, not_taken);
              // Fall through!
            case NO_OVERWRITE:
              __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
              __ bind(&skip_allocation);
              break;
            default: UNREACHABLE();
          }
          // Store the result in the HeapNumber and return.
          if (CpuFeatures::IsSupported(SSE2)) {
            CpuFeatures::Scope use_sse2(SSE2);
            __ cvtsi2sd(xmm0, Operand(ebx));
            __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
          } else {
            __ mov(Operand(esp, 1 * kPointerSize), ebx);
            __ fild_s(Operand(esp, 1 * kPointerSize));
            __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
          }
          GenerateReturn(masm);
        }
        break;
      }
      default: UNREACHABLE(); break;
    }
  }

  // If all else fails, use the runtime system to get the correct
  // result. If arguments were passed in registers, place them back on
  // the stack in the correct order below the return address.
  __ bind(&call_runtime);
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  }

  switch (op_) {
    case Token::ADD: {
      // Test for string arguments before calling runtime.
      Label not_strings, not_string1, string1, string1_smi2;

      // If this stub has already generated FP-specific code then the
      // arguments are already in edx and eax.
      if (!ShouldGenerateFPCode() && !HasArgsInRegisters()) {
        GenerateLoadArguments(masm);
      }

      // Registers containing left and right operands respectively.
      Register lhs, rhs;
      if (HasArgsReversed()) {
        lhs = eax;
        rhs = edx;
      } else {
        lhs = edx;
        rhs = eax;
      }

      // Test if first argument is a string.
      __ test(lhs, Immediate(kSmiTagMask));
      __ j(zero, &not_string1);
      __ CmpObjectType(lhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_string1);

      // First argument is a string, test second.
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &string1_smi2);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &string1);

      // First and second argument are strings. Jump to the string add stub.
      StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
      __ TailCallStub(&string_add_stub);

      __ bind(&string1_smi2);
      // First argument is a string, second is a smi. Try to lookup the number
      // string for the smi in the number string cache.
      NumberToStringStub::GenerateLookupNumberStringCache(
          masm, rhs, edi, ebx, ecx, true, &string1);

      // Replace second argument on stack and tailcall string add stub to make
      // the result.
      __ mov(Operand(esp, 1 * kPointerSize), edi);
      __ TailCallStub(&string_add_stub);

      // Only first argument is a string.
      __ bind(&string1);
      __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION);

      // First argument was not a string, test second.
      __ bind(&not_string1);
      __ test(rhs, Immediate(kSmiTagMask));
      __ j(zero, &not_strings);
      __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, ecx);
      __ j(above_equal, &not_strings);

      // Only second argument is a string.
      __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION);

      __ bind(&not_strings);
      // Neither argument is a string.
      __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
      break;
    }
    case Token::SUB:
      __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
      break;
    case Token::MUL:
      __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
      break;
    case Token::DIV:
      __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
      break;
    case Token::MOD:
      __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
      break;
    case Token::BIT_OR:
      __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
      break;
    case Token::BIT_AND:
      __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
      break;
    case Token::BIT_XOR:
      __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
      break;
    case Token::SAR:
      __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
      break;
    case Token::SHL:
      __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
      break;
    case Token::SHR:
      __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void GenericBinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
                                                       Label* alloc_failure) {
  Label skip_allocation;
  OverwriteMode mode = mode_;
  if (HasArgsReversed()) {
    if (mode == OVERWRITE_RIGHT) {
      mode = OVERWRITE_LEFT;
    } else if (mode == OVERWRITE_LEFT) {
      mode = OVERWRITE_RIGHT;
    }
  }
  switch (mode) {
    case OVERWRITE_LEFT: {
      // If the argument in edx is already an object, we skip the
      // allocation of a heap number.
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now edx can be overwritten, losing one of the arguments, as we
      // are done and will not need it any more.
      __ mov(edx, Operand(ebx));
      __ bind(&skip_allocation);
      // Use object in edx as a result holder.
      __ mov(eax, Operand(edx));
      break;
    }
    case OVERWRITE_RIGHT:
      // If the argument in eax is already an object, we skip the
      // allocation of a heap number.
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &skip_allocation, not_taken);
      // Fall through!
    case NO_OVERWRITE:
      // Allocate a heap number for the result. Keep eax and edx intact
      // for the possible runtime call.
      __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
      // Now eax can be overwritten, losing one of the arguments, as we
      // are done and will not need it any more.
      __ mov(eax, ebx);
      __ bind(&skip_allocation);
      break;
    default: UNREACHABLE();
  }
}
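
// Informal mapping (a sketch, not taken from the stub itself) of how the
// overwrite modes relate to source expressions:
//   x = a + b  -->  NO_OVERWRITE    (neither operand may be clobbered)
//   a = a + b  -->  OVERWRITE_LEFT  (the left heap number may be reused)
//   a = b * a  -->  OVERWRITE_RIGHT (the right heap number may be reused)
// With HasArgsReversed() the operand roles are swapped, which is what the
// mode flip at the top of GenerateHeapResultAllocation accounts for.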


void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
  // If arguments are not passed in registers, read them from the stack.
  ASSERT(!HasArgsInRegisters());
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 2 * kPointerSize));
}


void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
  // If arguments are not passed in registers, remove them from the stack
  // before returning.
  if (!HasArgsInRegisters()) {
    __ ret(2 * kPointerSize);  // Remove both operands.
  } else {
    __ ret(0);
  }
}


void GenericBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  ASSERT(HasArgsInRegisters());
  __ pop(ecx);
  if (HasArgsReversed()) {
    __ push(eax);
    __ push(edx);
  } else {
    __ push(edx);
    __ push(eax);
  }
  __ push(ecx);
}


void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;

  // Keep a copy of operands on the stack and make sure they are also in
  // edx, eax.
  if (HasArgsInRegisters()) {
    GenerateRegisterArgsPush(masm);
  } else {
    GenerateLoadArguments(masm);
  }

  // Internal frame is necessary to handle exceptions properly.
  __ EnterInternalFrame();

  // Push arguments on stack if the stub expects them there.
  if (!HasArgsInRegisters()) {
    __ push(edx);
    __ push(eax);
  }
  // Call the stub proper to get the result in eax.
  __ call(&get_result);
  __ LeaveInternalFrame();

  __ pop(ecx);  // Return address.
  // Left and right arguments are now on top.
  // Push the operation result. The tail call to BinaryOp_Patch will
  // return it to the original caller.
  __ push(eax);
  // Push this stub's key. Although the operation and the type info are
  // encoded into the key, the encoding is opaque, so push them too.
  __ push(Immediate(Smi::FromInt(MinorKey())));
  __ push(Immediate(Smi::FromInt(op_)));
  __ push(Immediate(Smi::FromInt(runtime_operands_type_)));

  __ push(ecx);  // Return address.

  // Patch the caller to an appropriate specialized stub
  // and return the operation result.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
      6,
      1);

  // The entry point for the result calculation is assumed to be immediately
  // after this sequence.
  __ bind(&get_result);
}
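
// For reference, the stack as seen by IC::kBinaryOp_Patch at the tail call
// above (a sketch reconstructed from the pushes in GenerateTypeTransition):
//   esp[0] : return address
//   esp[4] : runtime_operands_type_ (smi)
//   esp[8] : op_ (smi)
//   esp[12]: MinorKey (smi)
//   esp[16]: operation result
//   esp[20]: right operand
//   esp[24]: left operand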


Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
  GenericBinaryOpStub stub(key, type_info);
  return stub.GetCode();
}


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // Input on stack:
  // esp[4]: argument (should be number).
  // esp[0]: return address.
  // Test that eax is a number.
  Label runtime_call;
  Label runtime_call_clear_stack;
  Label input_not_smi;
  Label loaded;
  __ mov(eax, Operand(esp, kPointerSize));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &input_not_smi);
  // Input is a smi. Untag and load it onto the FPU stack.
  // Then load the low and high words of the double into ebx, edx.
  ASSERT_EQ(1, kSmiTagSize);
  __ sar(eax, 1);
  __ sub(Operand(esp), Immediate(2 * kPointerSize));
  __ mov(Operand(esp, 0), eax);
  __ fild_s(Operand(esp, 0));
  __ fst_d(Operand(esp, 0));
  __ pop(edx);
  __ pop(ebx);
  __ jmp(&loaded);
  __ bind(&input_not_smi);
  // Check if input is a HeapNumber.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(Operand(ebx), Immediate(Factory::heap_number_map()));
  __ j(not_equal, &runtime_call);
  // Input is a HeapNumber. Push it on the FPU stack and load its
  // low and high words into ebx, edx.
  __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
  __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));

  __ bind(&loaded);
  // ST[0] == double value
  // ebx = low 32 bits of double value
  // edx = high 32 bits of double value
  // Compute hash:
  //   h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
  __ mov(ecx, ebx);
  __ xor_(ecx, Operand(edx));
  __ mov(eax, ecx);
  __ sar(eax, 16);
  __ xor_(ecx, Operand(eax));
  __ mov(eax, ecx);
  __ sar(eax, 8);
  __ xor_(ecx, Operand(eax));
  ASSERT(IsPowerOf2(TranscendentalCache::kCacheSize));
  __ and_(Operand(ecx), Immediate(TranscendentalCache::kCacheSize - 1));
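  // The same hash, written out as plain C++ (a sketch with a hypothetical
  // helper name; the stub computes this in ecx above, using sar where this
  // sketch uses a logical shift, which is immaterial for a hash):
  //   static int CacheHash(uint32_t low, uint32_t high) {
  //     uint32_t h = low ^ high;
  //     h ^= h >> 16;
  //     h ^= h >> 8;
  //     return h & (TranscendentalCache::kCacheSize - 1);
  //   }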
  // ST[0] == double value.
  // ebx = low 32 bits of double value.
  // edx = high 32 bits of double value.
  // ecx = TranscendentalCache::hash(double value).
  __ mov(eax,
         Immediate(ExternalReference::transcendental_cache_array_address()));
  // Eax points to cache array.
  __ mov(eax, Operand(eax, type_ * sizeof(TranscendentalCache::caches_[0])));
  // Eax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ test(eax, Operand(eax));
  __ j(zero, &runtime_call_clear_stack);
#ifdef DEBUG
  // Check that the layout of cache elements matches expectations.
  { TranscendentalCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    CHECK_EQ(12, elem2_start - elem_start);  // Two uint_32's and a pointer.
    CHECK_EQ(0, elem_in0 - elem_start);
    CHECK_EQ(kIntSize, elem_in1 - elem_start);
    CHECK_EQ(2 * kIntSize, elem_out - elem_start);
  }
#endif
  // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
  __ lea(ecx, Operand(ecx, ecx, times_2, 0));
  __ lea(ecx, Operand(eax, ecx, times_4, 0));
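  // ecx = hash * 3 after the first lea, so the second lea computes
  // eax + hash * 3 * 4 = eax + hash * 12, matching the 12-byte element
  // size checked in the DEBUG block above.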
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmp(ebx, Operand(ecx, 0));
  __ j(not_equal, &cache_miss);
  __ cmp(edx, Operand(ecx, kIntSize));
  __ j(not_equal, &cache_miss);
  // Cache hit!
  __ mov(eax, Operand(ecx, 2 * kIntSize));
  __ fstp(0);
  __ ret(kPointerSize);

  __ bind(&cache_miss);
  // Update cache with new value.
  // We are short on registers, so use no_reg as scratch.
  // This gives slightly larger code.
  __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  GenerateOperation(masm);
  __ mov(Operand(ecx, 0), ebx);
  __ mov(Operand(ecx, kIntSize), edx);
  __ mov(Operand(ecx, 2 * kIntSize), eax);
  __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  __ ret(kPointerSize);

  __ bind(&runtime_call_clear_stack);
  __ fstp(0);
  __ bind(&runtime_call);
  __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
  // Only free register is edi.
  Label done;
  ASSERT(type_ == TranscendentalCache::SIN ||
         type_ == TranscendentalCache::COS);
  // More transcendental types can be added later.

  // Both fsin and fcos require arguments in the range +/-2^63 and
  // return NaN for infinities and NaN. They can share all code except
  // the actual fsin/fcos operation.
  Label in_range;
  // If argument is outside the range -2^63..2^63, fsin/cos doesn't
  // work. We must reduce it to the appropriate range.
  __ mov(edi, edx);
  __ and_(Operand(edi), Immediate(0x7ff00000));  // Exponent only.
  int supported_exponent_limit =
      (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
  __ cmp(Operand(edi), Immediate(supported_exponent_limit));
  __ j(below, &in_range, taken);
  // Check for infinity and NaN. Both return NaN for sin.
  __ cmp(Operand(edi), Immediate(0x7ff00000));
  Label non_nan_result;
  __ j(not_equal, &non_nan_result, taken);
  // Input is +/-Infinity or NaN. Result is NaN.
  __ fstp(0);
  // NaN is represented by 0x7ff8000000000000.
  __ push(Immediate(0x7ff80000));
  __ push(Immediate(0));
  __ fld_d(Operand(esp, 0));
  __ add(Operand(esp), Immediate(2 * kPointerSize));
  __ jmp(&done);

  __ bind(&non_nan_result);

  // Use fpmod to restrict argument to the range +/-2*PI.
  __ mov(edi, eax);  // Save eax before using fnstsw_ax.
  __ fldpi();
  __ fadd(0);
  __ fld(1);
  // FPU Stack: input, 2*pi, input.
  {
    Label no_exceptions;
    __ fwait();
    __ fnstsw_ax();
    // Clear if Illegal Operand or Zero Division exceptions are set.
    __ test(Operand(eax), Immediate(5));
    __ j(zero, &no_exceptions);
    __ fnclex();
    __ bind(&no_exceptions);
  }

  // Compute st(0) % st(1)
  {
    Label partial_remainder_loop;
    __ bind(&partial_remainder_loop);
    __ fprem1();
    __ fwait();
    __ fnstsw_ax();
    __ test(Operand(eax), Immediate(0x400 /* C2 */));
    // If C2 is set, computation only has partial result. Loop to
    // continue computation.
    __ j(not_zero, &partial_remainder_loop);
  }
  // FPU Stack: input, 2*pi, input % 2*pi
  __ fstp(2);
  __ fstp(0);
  __ mov(eax, edi);  // Restore eax (allocated HeapNumber pointer).

  // FPU Stack: input % 2*pi
  __ bind(&in_range);
  switch (type_) {
    case TranscendentalCache::SIN:
      __ fsin();
      break;
    case TranscendentalCache::COS:
      __ fcos();
      break;
    default:
      UNREACHABLE();
  }
  __ bind(&done);
}
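
// The range reduction above is, in effect, the IEEE remainder that fprem1
// computes. A plain C++ sketch (assuming <math.h> semantics; not part of
// the stub):
//   double reduced = remainder(x, 2.0 * M_PI);  // result in [-pi, pi]
// fprem1 only produces partial remainders for very large quotients, hence
// the loop on the C2 status flag.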


// Get the integer part of a heap number. Surprisingly, all this bit twiddling
// is faster than using the built-in instructions on floating point registers.
// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
// trashed registers.
void IntegerConvert(MacroAssembler* masm,
                    Register source,
                    TypeInfo type_info,
                    bool use_sse3,
                    Label* conversion_failure) {
  ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
  Label done, right_exponent, normal_exponent;
  Register scratch = ebx;
  Register scratch2 = edi;
  if (type_info.IsInteger32() && CpuFeatures::IsEnabled(SSE2)) {
    CpuFeatures::Scope scope(SSE2);
    __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
    return;
  }
  if (!type_info.IsInteger32() || !use_sse3) {
    // Get exponent word.
    __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
    // Get exponent alone in scratch2.
    __ mov(scratch2, scratch);
    __ and_(scratch2, HeapNumber::kExponentMask);
  }
  if (use_sse3) {
    CpuFeatures::Scope scope(SSE3);
    if (!type_info.IsInteger32()) {
      // Check whether the exponent is too big for a 64 bit signed integer.
      static const uint32_t kTooBigExponent =
          (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
      __ j(greater_equal, conversion_failure);
    }
    // Load x87 register with heap number.
    __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
    // Reserve space for 64 bit answer.
    __ sub(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(ecx, Operand(esp, 0));  // Load low word of answer into ecx.
    __ add(Operand(esp), Immediate(sizeof(uint64_t)));  // Nolint.
  } else {
    // Load ecx with zero. We use this either for the final shift or
    // for the answer.
    __ xor_(ecx, Operand(ecx));
    // Check whether the exponent matches a 32 bit signed int that cannot be
    // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
    // exponent is 30 (biased). This is the exponent that we are fastest at and
    // also the highest exponent we can handle here.
    const uint32_t non_smi_exponent =
        (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
    __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
    // If we have a match of the int32-but-not-Smi exponent then skip some
    // logic.
    __ j(equal, &right_exponent);
    // If the exponent is higher than that then go to slow case. This catches
    // numbers that don't fit in a signed int32, infinities and NaNs.
    __ j(less, &normal_exponent);

    {
      // Handle a big exponent. The only reason we have this code is that the
      // >>> operator has a tendency to generate numbers with an exponent of 31.
      const uint32_t big_non_smi_exponent =
          (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
      __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
      __ j(not_equal, conversion_failure);
      // We have the big exponent, typically from >>>. This means the number is
      // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
      __ mov(scratch2, scratch);
      __ and_(scratch2, HeapNumber::kMantissaMask);
      // Put back the implicit 1.
      __ or_(scratch2, 1 << HeapNumber::kExponentShift);
      // Shift up the mantissa bits to take up the space the exponent used to
      // take. We just orred in the implicit bit so that took care of one and
      // we want to use the full unsigned range so we subtract 1 bit from the
      // shift distance.
      const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
      __ shl(scratch2, big_shift_distance);
      // Get the second half of the double.
      __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
      // Shift down 21 bits to get the most significant 11 bits of the low
      // mantissa word.
      __ shr(ecx, 32 - big_shift_distance);
      __ or_(ecx, Operand(scratch2));
      // We have the answer in ecx, but we may need to negate it.
      __ test(scratch, Operand(scratch));
      __ j(positive, &done);
      __ neg(ecx);
      __ jmp(&done);
    }

    __ bind(&normal_exponent);
    // Exponent word in scratch, exponent part of exponent word in scratch2.
    // Zero in ecx.
    // We know the exponent is smaller than 30 (biased). If it is less than
    // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
    // it rounds to zero.
    const uint32_t zero_exponent =
        (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
    __ sub(Operand(scratch2), Immediate(zero_exponent));
    // ecx already has a Smi zero.
    __ j(less, &done);

    // We have a shifted exponent between 0 and 30 in scratch2.
    __ shr(scratch2, HeapNumber::kExponentShift);
    __ mov(ecx, Immediate(30));
    __ sub(ecx, Operand(scratch2));

    __ bind(&right_exponent);
    // Here ecx is the shift, scratch is the exponent word.
    // Get the top bits of the mantissa.
    __ and_(scratch, HeapNumber::kMantissaMask);
    // Put back the implicit 1.
    __ or_(scratch, 1 << HeapNumber::kExponentShift);
    // Shift up the mantissa bits to take up the space the exponent used to
    // take. We have kExponentShift + 1 significant bits in the low end of the
    // word. Shift them to the top bits.
    const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
    __ shl(scratch, shift_distance);
    // Get the second half of the double. For some exponents we don't
    // actually need this because the bits get shifted out again, but
    // it's probably slower to test than just to do it.
    __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
    // Shift down 22 bits to get the most significant 10 bits of the low
    // mantissa word.
    __ shr(scratch2, 32 - shift_distance);
    __ or_(scratch2, Operand(scratch));
    // Move down according to the exponent.
    __ shr_cl(scratch2);
    // Now the unsigned answer is in scratch2. We need to move it to ecx and
    // we may need to fix the sign.
    Label negative;
    __ xor_(ecx, Operand(ecx));
    __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
    __ j(greater, &negative);
    __ mov(ecx, scratch2);
    __ jmp(&done);
    __ bind(&negative);
    __ sub(ecx, Operand(scratch2));
    __ bind(&done);
  }
}
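
// A rough C++ rendering of the non-SSE3 path above (a sketch with a
// hypothetical helper; it covers the same exponent range, 0..31, and
// reports failure where the code jumps to conversion_failure):
//   static bool HeapNumberToInt32(double d, int32_t* out) {
//     uint64_t bits;
//     memcpy(&bits, &d, sizeof(bits));
//     int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
//     if (exponent < 0) { *out = 0; return true; }  // Rounds to zero.
//     if (exponent > 31) return false;              // Slow case.
//     uint64_t mantissa =
//         (bits & ((static_cast<uint64_t>(1) << 52) - 1)) |
//         (static_cast<uint64_t>(1) << 52);          // Implicit 1.
//     uint32_t result =
//         static_cast<uint32_t>(mantissa >> (52 - exponent));
//     int64_t signed_result = (bits >> 63) ? -static_cast<int64_t>(result)
//                                          : static_cast<int64_t>(result);
//     *out = static_cast<int32_t>(signed_result);
//     return true;
//   }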


// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are left and right integers for a bit op.
void FloatingPointHelper::LoadNumbersAsIntegers(MacroAssembler* masm,
                                                TypeInfo type_info,
                                                bool use_sse3,
                                                Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  if (!type_info.IsDouble()) {
    if (!type_info.IsSmi()) {
      __ test(edx, Immediate(kSmiTagMask));
      __ j(not_zero, &arg1_is_object);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(edx);
    }
    __ SmiUntag(edx);
    __ jmp(&load_arg2);
  }

  __ bind(&arg1_is_object);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm, edx, type_info, use_sse3, conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);
  if (!type_info.IsDouble()) {
    // Test if arg2 is a Smi.
    if (!type_info.IsSmi()) {
      __ test(eax, Immediate(kSmiTagMask));
      __ j(not_zero, &arg2_is_object);
    } else {
      if (FLAG_debug_code) __ AbortIfNotSmi(eax);
    }
    __ SmiUntag(eax);
    __ mov(ecx, eax);
    __ jmp(&done);
  }

  __ bind(&arg2_is_object);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm, eax, type_info, use_sse3, conversion_failure);
  __ bind(&done);
  __ mov(eax, edx);
}


// Input: edx, eax are the left and right objects of a bit op.
// Output: eax, ecx are left and right integers for a bit op.
void FloatingPointHelper::LoadUnknownsAsIntegers(MacroAssembler* masm,
                                                 bool use_sse3,
                                                 Label* conversion_failure) {
  // Check float operands.
  Label arg1_is_object, check_undefined_arg1;
  Label arg2_is_object, check_undefined_arg2;
  Label load_arg2, done;

  // Test if arg1 is a Smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &arg1_is_object);

  __ SmiUntag(edx);
  __ jmp(&load_arg2);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg1);
  __ cmp(edx, Factory::undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(edx, Immediate(0));
  __ jmp(&load_arg2);

  __ bind(&arg1_is_object);
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ebx, Factory::heap_number_map());
  __ j(not_equal, &check_undefined_arg1);

  // Get the untagged integer version of the edx heap number in ecx.
  IntegerConvert(masm,
                 edx,
                 TypeInfo::Unknown(),
                 use_sse3,
                 conversion_failure);
  __ mov(edx, ecx);

  // Here edx has the untagged integer, eax has a Smi or a heap number.
  __ bind(&load_arg2);

  // Test if arg2 is a Smi.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &arg2_is_object);

  __ SmiUntag(eax);
  __ mov(ecx, eax);
  __ jmp(&done);

  // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
  __ bind(&check_undefined_arg2);
  __ cmp(eax, Factory::undefined_value());
  __ j(not_equal, conversion_failure);
  __ mov(ecx, Immediate(0));
  __ jmp(&done);

  __ bind(&arg2_is_object);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(ebx, Factory::heap_number_map());
  __ j(not_equal, &check_undefined_arg2);

  // Get the untagged integer version of the eax heap number in ecx.
  IntegerConvert(masm,
                 eax,
                 TypeInfo::Unknown(),
                 use_sse3,
                 conversion_failure);
  __ bind(&done);
  __ mov(eax, edx);
}


void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
                                         TypeInfo type_info,
                                         bool use_sse3,
                                         Label* conversion_failure) {
  if (type_info.IsNumber()) {
    LoadNumbersAsIntegers(masm, type_info, use_sse3, conversion_failure);
  } else {
    LoadUnknownsAsIntegers(masm, use_sse3, conversion_failure);
  }
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ test(number, Immediate(kSmiTagMask));
  __ j(zero, &load_smi, not_taken);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm) {
  Label load_smi_edx, load_eax, load_smi_eax, done;
  // Load operand in edx into xmm0.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_edx, not_taken);  // Argument in edx is a smi.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));

  __ bind(&load_eax);
  // Load operand in eax into xmm1.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_eax, not_taken);  // Argument in eax is a smi.
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);

  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_edx, not_taken);  // Argument in edx is a smi.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_eax, not_taken);  // Argument in eax is a smi.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
  __ j(equal, &load_float_eax);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm0, Operand(edx));
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ cvtsi2sd(xmm1, Operand(eax));
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done);
  __ bind(&load_float_eax);
  __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
                                       Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm0, Operand(scratch));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ cvtsi2sd(xmm1, Operand(scratch));
}


void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
                                            Register scratch,
                                            ArgLocation arg_location) {
  Label load_smi_1, load_smi_2, done_load_1, done;
  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, edx);
  } else {
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_1, not_taken);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ bind(&done_load_1);

  if (arg_location == ARGS_IN_REGISTERS) {
    __ mov(scratch, eax);
  } else {
    __ mov(scratch, Operand(esp, 1 * kPointerSize));
  }
  __ test(scratch, Immediate(kSmiTagMask));
  __ j(zero, &load_smi_2, not_taken);
  __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_1);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
  __ jmp(&done_load_1);

  __ bind(&load_smi_2);
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);

  __ bind(&done);
}


void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
                                        Register scratch) {
  const Register left = edx;
  const Register right = eax;
  __ mov(scratch, left);
  ASSERT(!scratch.is(right));  // We're about to clobber scratch.
  __ SmiUntag(scratch);
  __ push(scratch);
  __ fild_s(Operand(esp, 0));

  __ mov(scratch, right);
  __ SmiUntag(scratch);
  __ mov(Operand(esp, 0), scratch);
  __ fild_s(Operand(esp, 0));
  __ pop(scratch);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test that both operands are numbers (heap numbers or smis);
  // jump to non_float if either of them is not.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &test_other, not_taken);  // argument in edx is OK
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(scratch, Factory::heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &done);  // argument in eax is OK
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, Factory::heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
  Label slow, done;

  if (op_ == Token::SUB) {
    // Check whether the value is a smi.
    Label try_float;
    __ test(eax, Immediate(kSmiTagMask));
    __ j(not_zero, &try_float, not_taken);

    // Go slow case if the value of the expression is zero
    // to make sure that we switch between 0 and -0.
    __ test(eax, Operand(eax));
    __ j(zero, &slow, not_taken);

    // The value of the expression is a smi that is not zero. Try
    // optimistic subtraction '0 - value'.
    Label undo;
    __ mov(edx, Operand(eax));
    __ Set(eax, Immediate(0));
    __ sub(eax, Operand(edx));
    __ j(overflow, &undo, not_taken);

    // If result is a smi we are done.
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &done, taken);

    // Restore eax and go slow case.
    __ bind(&undo);
    __ mov(eax, Operand(edx));
    __ jmp(&slow);

    // Try floating point case.
    __ bind(&try_float);
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ cmp(edx, Factory::heap_number_map());
    __ j(not_equal, &slow);
    if (overwrite_) {
      __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
      __ xor_(edx, HeapNumber::kSignMask);  // Flip sign.
      __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
    } else {
      __ mov(edx, Operand(eax));
      // edx: operand
      __ AllocateHeapNumber(eax, ebx, ecx, &undo);
      // eax: allocated 'empty' number
      __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
      __ xor_(ecx, HeapNumber::kSignMask);  // Flip sign.
      __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
      __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
      __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
    }
  } else if (op_ == Token::BIT_NOT) {
    // Check if the operand is a heap number.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ cmp(edx, Factory::heap_number_map());
    __ j(not_equal, &slow, not_taken);

    // Convert the heap number in eax to an untagged integer in ecx.
    IntegerConvert(masm,
                   eax,
                   TypeInfo::Unknown(),
                   CpuFeatures::IsSupported(SSE3),
                   &slow);

    // Do the bitwise operation and check if the result fits in a smi.
    Label try_float;
    __ not_(ecx);
    __ cmp(ecx, 0xc0000000);
    __ j(sign, &try_float, not_taken);

    // Tag the result as a smi and we're done.
    ASSERT(kSmiTagSize == 1);
    __ lea(eax, Operand(ecx, times_2, kSmiTag));
    __ jmp(&done);

    // Try to store the result in a heap number.
    __ bind(&try_float);
    if (!overwrite_) {
      // Allocate a fresh heap number, but don't overwrite eax until
      // we're sure we can do it without going through the slow case
      // that needs the value in eax.
      __ AllocateHeapNumber(ebx, edx, edi, &slow);
      __ mov(eax, Operand(ebx));
    }
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope use_sse2(SSE2);
      __ cvtsi2sd(xmm0, Operand(ecx));
      __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
    } else {
      __ push(ecx);
      __ fild_s(Operand(esp, 0));
      __ pop(ecx);
      __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
    }
  } else {
    UNIMPLEMENTED();
  }

  // Return from the stub.
  __ bind(&done);
  __ StubReturn(1);

  // Handle the slow case by jumping to the JavaScript builtin.
  __ bind(&slow);
  __ pop(ecx);  // pop return address.
  __ push(eax);
  __ push(ecx);  // push return address
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &slow, not_taken);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, Operand(eax));
  __ j(above_equal, &slow, not_taken);

  // Read the argument from the stack and return it.
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);  // shifting code depends on this
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);
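
  // The scaling trick above: eax (parameter count) and edx (key) are both
  // smis, i.e. already value * 2, so times_2 scaling yields value *
  // kPointerSize. The load is equivalent, in effect, to:
  //   eax = *(ebp + count * 4 - key * 4 + kDisplacement);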

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, Operand(ecx));
  __ j(above_equal, &slow, not_taken);

  // Read the argument from the stack and return it.
  ASSERT(kSmiTagSize == 1 && kSmiTag == 0);  // shifting code depends on this
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // The displacement is used for skipping the return address and the
  // frame pointer on the stack. It is the offset of the last
  // parameter (if any) relative to the frame pointer.
  static const int kDisplacement = 2 * kPointerSize;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, Operand(ecx));
  __ j(zero, &add_arguments_object);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current (global) context.
  int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Setup the callee in-object property.
  ASSERT(Heap::arguments_callee_index == 0);
  __ mov(ebx, Operand(esp, 3 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);

  // Get the length (smi tagged) and set that as an in-object property too.
  ASSERT(Heap::arguments_length_index == 1);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, Operand(ecx));
  __ j(zero, &done);

  // Get the parameters pointer from the stack and untag the length.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ SmiUntag(ecx);

  // Setup the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(Factory::fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(Operand(edi), Immediate(kPointerSize));
  __ sub(Operand(edx), Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
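
// Layout of the object built above (a sketch): one allocation starting
// with Heap::kArgumentsObjectSize bytes for the JSObject, whose in-object
// slot 0 holds the callee and slot 1 the length, immediately followed in
// the same allocation by the FixedArray that backs the elements.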


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if the regexp entry in generated code is turned off by the
  // runtime switch.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP
  if (!FLAG_regexp_entry_native) {
    __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
    return;
  }

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime, invoke_regexp;

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address();
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size();
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, Operand(ebx));
  __ j(zero, &runtime, not_taken);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  ASSERT_EQ(0, kSmiTag);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);
  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2. This
  // uses the assumption that smis are 2 * their untagged value.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ add(Operand(edx), Immediate(2));  // edx was a smi.
  // Check that the static offsets vector buffer is large enough.
  __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
  __ j(above, &runtime);
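  // Example (illustrative): /a(b)(c)/ has number_of_captures == 2, so it
  // needs (2 + 1) * 2 == 6 offsets: a start and an end position for the
  // whole match and for each capture.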

  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the second argument is a string.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);
  // Get the length of the string to ebx.
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  // ebx: Length of subject string as a smi
  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the third argument is a positive smi less than the subject
  // string length. A negative value will be greater (unsigned comparison).
  __ mov(eax, Operand(esp, kPreviousIndexOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ cmp(eax, Operand(ebx));
  __ j(above_equal, &runtime);
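  // Both eax (previous index) and ebx (subject length) are smis at this
  // point, so the unsigned compare above works directly on the tagged
  // values: a negative smi has its sign bit set and therefore compares
  // above any non-negative length, which is exactly the rejection we want.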
  // ecx: RegExp data (FixedArray)
  // edx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, Factory::fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, Operand(eax));
  __ j(greater, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check the representation and encoding of the subject string.
  Label seq_string, seq_two_byte_string, check_code;
  const int kStringRepresentationEncodingMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ and_(ebx, kStringRepresentationEncodingMask);
  // First check for sequential string.
  ASSERT_EQ(0, kStringTag);
  ASSERT_EQ(0, kSeqStringTag);
  __ test(Operand(ebx),
          Immediate(kIsNotStringMask | kStringRepresentationMask));
  __ j(zero, &seq_string);

  // Check for flat cons string.
  // A flat cons string is a cons string where the second part is the empty
  // string. In that case the subject string is just the first part of the cons
  // string. Also in this case the first part of the cons string is known to be
  // a sequential string or an external string.
  __ and_(ebx, kStringRepresentationMask);
  __ cmp(ebx, kConsStringTag);
  __ j(not_equal, &runtime);
  __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
  __ cmp(Operand(edx), Factory::empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  ASSERT_EQ(0, kSeqStringTag);
  __ test(ebx, Immediate(kStringRepresentationMask));
  __ j(not_zero, &runtime);
  __ and_(ebx, kStringRepresentationEncodingMask);
  __ bind(&seq_string);
  // eax: subject string (sequential, either ascii or two byte)
  // ebx: subject string type & kStringRepresentationEncodingMask
  // ecx: RegExp data (FixedArray)
  // Check that the irregexp code has been generated for an ascii string. If
  // it has, the field contains a code object; otherwise it contains the hole.
  const int kSeqTwoByteString = kStringTag | kSeqStringTag | kTwoByteStringTag;
  __ cmp(ebx, kSeqTwoByteString);
  __ j(equal, &seq_two_byte_string);
  if (FLAG_debug_code) {
    __ cmp(ebx, kStringTag | kSeqStringTag | kAsciiStringTag);
    __ Check(equal, "Expected sequential ascii string");
  }
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
  __ Set(edi, Immediate(1));  // Type is ascii.
  __ jmp(&check_code);

  __ bind(&seq_two_byte_string);
  // eax: subject string
  // ecx: RegExp data (FixedArray)
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Set(edi, Immediate(0));  // Type is two byte.

  __ bind(&check_code);
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains the hole.
  __ CmpObjectType(edx, CODE_TYPE, ebx);
  __ j(not_equal, &runtime);

  // eax: subject string
  // edx: code
  // edi: encoding of subject string (1 if ascii, 0 if two_byte)
  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ SmiUntag(ebx);  // Previous index from smi.

  // eax: subject string
  // ebx: previous index
  // edx: code
  // edi: encoding of subject string (1 if ascii, 0 if two_byte)
  // All checks done. Now push arguments for native regexp code.
  __ IncrementCounter(&Counters::regexp_entry_native, 1);

  static const int kRegExpExecuteArguments = 7;
  __ PrepareCallCFunction(kRegExpExecuteArguments, ecx);

  // Argument 7: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));

  // Argument 6: Start (high end) of backtracking stack memory area.
  __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 5 * kPointerSize), ecx);

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector()));

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest;
  __ test(edi, Operand(edi));
  __ mov(edi, FieldOperand(eax, String::kLengthOffset));
  __ j(zero, &setup_two_byte);
  __ SmiUntag(edi);
  __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest);

  __ bind(&setup_two_byte);
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);  // edi is a smi, i.e. 2 * length.
  __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

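  // For reference, the scale factors above exploit the smi encoding: in the
  // ascii case both the length and the previous index are untagged and scale
  // by times_1 (one byte per character). In the two byte case the length in
  // edi is left as a smi, which is already 2 * the character count, so
  // times_1 on the smi gives the byte length, while the untagged previous
  // index in ebx needs times_2 to become a byte offset.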
  __ bind(&setup_rest);

  // Argument 2: Previous index.
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Subject string.
  __ mov(Operand(esp, 0 * kPointerSize), eax);

  // Locate the code entry and call it.
  __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ CallCFunction(edx, kRegExpExecuteArguments);
  // Check the result.
  Label success;
  __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
  __ j(equal, &success, taken);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure, taken);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If it is not an exception, it can only be retry. Handle that in the
  // runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerun the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Top::k_pending_exception_address);
  __ mov(eax,
         Operand::StaticVariable(ExternalReference::the_hole_value_location()));
  __ cmp(eax, Operand::StaticVariable(pending_exception));
  __ j(equal, &runtime);
  __ bind(&failure);
  // For failure and exception return null.
  __ mov(Operand(eax), Factory::null_value());
  __ ret(4 * kPointerSize);
  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ add(Operand(edx), Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ mov(ecx, ebx);
  __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ mov(ecx, ebx);
  __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector();
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(Operand(edx), Immediate(1));
  __ j(negative, &done);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#endif  // V8_INTERPRETED_REGEXP
}


void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
                                                         Register object,
                                                         Register result,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         bool object_is_smi,
                                                         Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
  __ mov(number_string_cache,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));
  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  __ mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  __ shr(mask, 1);  // Divide length by two (length is not a smi).
  __ sub(Operand(mask), Immediate(1));  // Make mask.
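  // Worked example of the mask computation: a cache backing store of 128
  // fixed array elements holds 64 (number, string) pairs, so mask becomes
  // 64 - 1 = 63 and the entry index below is always hash & 63, staying in
  // range for any hash value.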

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label smi_hash_calculated;
  Label load_result_from_cache;
  if (object_is_smi) {
    __ mov(scratch, object);
    __ SmiUntag(scratch);
  } else {
    Label not_smi, hash_calculated;
    ASSERT(kSmiTag == 0);
    __ test(object, Immediate(kSmiTagMask));
    __ j(not_zero, &not_smi);
    __ mov(scratch, object);
    __ SmiUntag(scratch);
    __ jmp(&smi_hash_calculated);
    __ bind(&not_smi);
    __ cmp(FieldOperand(object, HeapObject::kMapOffset),
           Factory::heap_number_map());
    __ j(not_equal, not_found);
    ASSERT_EQ(8, kDoubleSize);
    __ mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
    __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
    // Object is heap number and hash is now in scratch. Calculate cache index.
    __ and_(scratch, Operand(mask));
    Register index = scratch;
    Register probe = mask;
    __ mov(probe,
           FieldOperand(number_string_cache,
                        index,
                        times_twice_pointer_size,
                        FixedArray::kHeaderSize));
    __ test(probe, Immediate(kSmiTagMask));
    __ j(zero, not_found);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope fscope(SSE2);
      __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
      __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
      __ comisd(xmm0, xmm1);
    } else {
      __ fld_d(FieldOperand(object, HeapNumber::kValueOffset));
      __ fld_d(FieldOperand(probe, HeapNumber::kValueOffset));
      __ FCmp();
    }
    __ j(parity_even, not_found);  // Bail out if NaN is involved.
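    // Both comisd and the x87 compare behind FCmp report an unordered
    // result, i.e. at least one NaN operand, by setting the parity flag, so
    // the parity_even jump above rejects NaN keys; only then can the
    // equality check that follows be trusted.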
    __ j(not_equal, not_found);  // The cache did not contain this value.
    __ jmp(&load_result_from_cache);
  }

  __ bind(&smi_hash_calculated);
  // Object is smi and hash is now in scratch. Calculate cache index.
  __ and_(scratch, Operand(mask));
  Register index = scratch;
  // Check if the entry is the smi we are looking for.
  __ cmp(object,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize));
  __ j(not_equal, not_found);

  // Get the result from the cache.
  __ bind(&load_result_from_cache);
  __ mov(result,
         FieldOperand(number_string_cache,
                      index,
                      times_twice_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize));
  __ IncrementCounter(&Counters::number_to_string_native, 1);
}


void NumberToStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  __ mov(ebx, Operand(esp, kPointerSize));

  // Generate code to lookup number in the number string cache.
  GenerateLookupNumberStringCache(masm, ebx, eax, ecx, edx, false, &runtime);
  __ ret(1 * kPointerSize);

  __ bind(&runtime);
  // Handle number to string in the runtime system if not found in the cache.
  __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
}


void RecordWriteStub::Generate(MacroAssembler* masm) {
  masm->RecordWriteHelper(object_, addr_, scratch_);
  masm->ret(0);
}


static int NegativeComparisonResult(Condition cc) {
  ASSERT(cc != equal);
  ASSERT((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}
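// Note on NegativeComparisonResult above: it is used when a comparison must
// come out false, e.g. when one operand is undefined or NaN. Returning LESS
// for a greater/greater_equal condition (and GREATER otherwise) guarantees
// that the subsequent test of the condition against the returned value
// fails, so the overall comparison evaluates to false.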


void CompareStub::Generate(MacroAssembler* masm) {
  Label call_builtin, done;

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  {
    Label not_identical;
    __ cmp(eax, Operand(edx));
    __ j(not_equal, &not_identical);

    if (cc_ != equal) {
      // Check for undefined. undefined OP undefined is false even though
      // undefined == undefined.
      Label check_for_nan;
      __ cmp(edx, Factory::undefined_value());
      __ j(not_equal, &check_for_nan);
      __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
    // so we do the second best thing - test it ourselves.
    // Note: if cc_ != equal, never_nan_nan_ is not used.
    if (never_nan_nan_ && (cc_ == equal)) {
      __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
      __ ret(0);
    } else {
      Label return_equal;
      Label heap_number;
      // If it's not a heap number, then return equal.
      __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
             Immediate(Factory::heap_number_map()));
      __ j(equal, &heap_number);
      __ bind(&return_equal);
      __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
      __ ret(0);

      __ bind(&heap_number);
      // It is a heap number, so return non-equal if it's NaN and equal if
      // it's not NaN.
      // The representation of NaN values has all exponent bits (52..62) set,
      // and not all mantissa bits (0..51) clear.
      // We only accept QNaNs, which have bit 51 set.
      // Read top bits of double representation (second word of value).

      // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
      // all bits in the mask are set. We only need to check the word
      // that contains the exponent and high bit of the mantissa.
      ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u);
      __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
      __ xor_(eax, Operand(eax));
      // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
      // bits.
      __ add(edx, Operand(edx));
      __ cmp(edx, kQuietNaNHighBitsMask << 1);
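      // The add above doubles edx, shifting the sign bit out of the high
      // word, so the unsigned compare against the doubled mask is a
      // sign-insensitive check of the exponent bits plus the quiet bit: a
      // quiet NaN high word lands above_equal, while infinity (exponent bits
      // set, mantissa bits clear) lands below and is treated as a number.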
      if (cc_ == equal) {
        ASSERT_NE(1, EQUAL);
        __ setcc(above_equal, eax);
        __ ret(0);
      } else {
        Label nan;
        __ j(above_equal, &nan);
        __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
        __ ret(0);
        __ bind(&nan);
        __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
        __ ret(0);
      }
    }

    __ bind(&not_identical);
  }

  if (cc_ == equal) {  // Both strict and non-strict.
    Label slow;  // Fallthrough label.

    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    if (strict_) {
      // If either is a Smi (we know that not both are), then they can only
      // be equal if the other is a HeapNumber. If so, use the slow case.
      {
        Label not_smis;
        ASSERT_EQ(0, kSmiTag);
        ASSERT_EQ(0, Smi::FromInt(0));
        __ mov(ecx, Immediate(kSmiTagMask));
        __ and_(ecx, Operand(eax));
        __ test(ecx, Operand(edx));
        __ j(not_zero, &not_smis);
        // One operand is a smi.

        // Check whether the non-smi is a heap number.
        ASSERT_EQ(1, kSmiTagMask);
        // ecx still holds eax & kSmiTag, which is either zero or one.
        __ sub(Operand(ecx), Immediate(0x01));
        __ mov(ebx, edx);
        __ xor_(ebx, Operand(eax));
        __ and_(ebx, Operand(ecx));  // ebx holds either 0 or eax ^ edx.
        __ xor_(ebx, Operand(eax));
        // if eax was smi, ebx is now edx, else eax.
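        // In other words, this is a branchless select. After the sub, ecx is
        // 0xffffffff when eax is a smi (0 - 1) and 0 otherwise (1 - 1), so
        // ebx becomes (eax ^ edx) ^ eax == edx in the first case and
        // 0 ^ eax == eax in the second. Either way ebx now holds whichever
        // operand is not a smi.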

        // Check if the non-smi operand is a heap number.
        __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
               Immediate(Factory::heap_number_map()));
        // If heap number, handle it in the slow case.
        __ j(equal, &slow);
        // Return non-equal (ebx is not zero).
        __ mov(eax, ebx);
        __ ret(0);

        __ bind(&not_smis);
      }

      // If either operand is a JSObject or an oddball value, then they are
      // not equal since their pointers are different.
      // There is no test for undetectability in strict equality.

      // Get the type of the first operand.
      __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));

      // If the first object is a JS object, we have done pointer comparison.
      ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
      Label first_non_object;
      __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
      __ j(below, &first_non_object);

      // Return non-zero (eax is not zero).
      Label return_not_equal;
      ASSERT(kHeapObjectTag != 0);
      __ bind(&return_not_equal);
      __ ret(0);

      __ bind(&first_non_object);
      // Check for oddballs: true, false, null, undefined.
      __ cmp(ecx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));

      __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
      __ j(above_equal, &return_not_equal);

      // Check for oddballs: true, false, null, undefined.
      __ cmp(ecx, ODDBALL_TYPE);
      __ j(equal, &return_not_equal);

      // Fall through to the general case.
    }
    __ bind(&slow);
  }

  // Push arguments below the return address.
  __ pop(ecx);
  __ push(eax);
  __ push(edx);
  __ push(ecx);

  // Generate the number comparison code.
  if (include_number_compare_) {
    Label non_number_comparison;
    Label unordered;
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatures::Scope use_sse2(SSE2);
      CpuFeatures::Scope use_cmov(CMOV);

      FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
      __ comisd(xmm0, xmm1);

      // Don't base result on EFLAGS when a NaN is involved.
      __ j(parity_even, &unordered, not_taken);
      // Return a result of -1, 0, or 1, based on EFLAGS.
      __ mov(eax, 0);  // equal
      __ mov(ecx, Immediate(Smi::FromInt(1)));
      __ cmov(above, eax, Operand(ecx));
      __ mov(ecx, Immediate(Smi::FromInt(-1)));
      __ cmov(below, eax, Operand(ecx));
      __ ret(2 * kPointerSize);
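      // The two cmov instructions implement a branchless three-way result:
      // eax starts at 0 (the smi for EQUAL) and is conditionally overwritten
      // with the smi 1 on 'above' or the smi -1 on 'below', so exactly one
      // of the three values survives without any jumps.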
    } else {
      FloatingPointHelper::CheckFloatOperands(
          masm, &non_number_comparison, ebx);
      FloatingPointHelper::LoadFloatOperands(masm, ecx);
      __ FCmp();

      // Don't base result on EFLAGS when a NaN is involved.
      __ j(parity_even, &unordered, not_taken);

      Label below_label, above_label;
      // Return a result of -1, 0, or 1, based on EFLAGS. In all cases remove
      // two arguments from the stack as they have been pushed in preparation
      // of a possible runtime call.
      __ j(below, &below_label, not_taken);
      __ j(above, &above_label, not_taken);

      __ xor_(eax, Operand(eax));
      __ ret(2 * kPointerSize);

      __ bind(&below_label);
      __ mov(eax, Immediate(Smi::FromInt(-1)));
      __ ret(2 * kPointerSize);

      __ bind(&above_label);
      __ mov(eax, Immediate(Smi::FromInt(1)));
      __ ret(2 * kPointerSize);
    }

    // If one of the numbers was NaN, then the result is always false.
    // The cc is never not-equal.
    __ bind(&unordered);
    ASSERT(cc_ != not_equal);
    if (cc_ == less || cc_ == less_equal) {
      __ mov(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ mov(eax, Immediate(Smi::FromInt(-1)));
    }
    __ ret(2 * kPointerSize);  // eax, edx were pushed

    // The number comparison code did not provide a valid result.
    __ bind(&non_number_comparison);
  }

  // Fast negative check for symbol-to-symbol equality.
  Label check_for_strings;
  if (cc_ == equal) {
    BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
    BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are symbols they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(2 * kPointerSize);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &call_builtin);

  // Inline comparison of ascii strings.
  StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
                                                     edx,
                                                     eax,
                                                     ecx,
                                                     ebx,
                                                     edi);
#ifdef DEBUG
  __ Abort("Unexpected fall-through from string comparison");
#endif

  __ bind(&call_builtin);
  // Must swap argument order.
  __ pop(ecx);
  __ pop(edx);
  __ pop(eax);
  __ push(edx);
  __ push(eax);

  // Figure out which native to call and setup the arguments.
  Builtins::JavaScript builtin;
  if (cc_ == equal) {
    builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    builtin = Builtins::COMPARE;
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc_))));
  }

  // Restore return address on the stack.
  __ push(ecx);

  // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ InvokeBuiltin(builtin, JUMP_FUNCTION);
}


void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
                                    Label* label,
                                    Register object,
                                    Register scratch) {
  __ test(object, Immediate(kSmiTagMask));
  __ j(zero, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
  __ cmp(scratch, kSymbolTag | kStringTag);
  __ j(not_equal, label);
}


void StackCheckStub::Generate(MacroAssembler* masm) {
  // Because builtins always remove the receiver from the stack, we
  // have to fake one to avoid underflowing the stack. The receiver
  // must be inserted below the return address on the stack so we
  // temporarily store that in a register.
  __ pop(eax);
  __ push(Immediate(Smi::FromInt(0)));
  __ push(eax);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;

  // If the receiver might be a value (string, number or boolean), check for
  // this and box it if it is.
  if (ReceiverMightBeValue()) {
    // Get the receiver from the stack.
    // +1 ~ return address
    Label receiver_is_value, receiver_is_js_object;
    __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));

    // Check if receiver is a smi (which is a number value).
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &receiver_is_value, not_taken);

    // Check if the receiver is a valid JS object.
    __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
    __ j(above_equal, &receiver_is_js_object);

    // Call the runtime to box the value.
    __ bind(&receiver_is_value);
    __ EnterInternalFrame();
    __ push(eax);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ LeaveInternalFrame();
    __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);

    __ bind(&receiver_is_js_object);
  }

  // Get the function to call from the stack.
  // +2 ~ receiver, return address
  __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));

  // Check that the function really is a JavaScript function.
  __ test(edi, Immediate(kSmiTagMask));
  __ j(zero, &slow, not_taken);
  // Goto slow case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &slow, not_taken);

  // Fast-case: Just invoke the function.
  ParameterCount actual(argc_);
  __ InvokeFunction(edi, actual, JUMP_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
  __ Set(eax, Immediate(argc_));
  __ Set(ebx, Immediate(0));
  __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
  Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
  __ jmp(adaptor, RelocInfo::CODE_TARGET);
}


void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  // eax holds the exception.

  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // Drop the sp to the top of the handler.
  ExternalReference handler_address(Top::k_handler_address);
  __ mov(esp, Operand::StaticVariable(handler_address));

  // Restore next handler and frame pointer, discard handler state.
  ASSERT(StackHandlerConstants::kNextOffset == 0);
  __ pop(Operand::StaticVariable(handler_address));
  ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  __ pop(ebp);
  __ pop(edx);  // Remove state.

  // Before returning we restore the context from the frame pointer if
  // not NULL. The frame pointer is NULL in the exception handler of
  // a JS entry frame.
  __ xor_(esi, Operand(esi));  // Tentatively set context pointer to NULL.
  Label skip;
  __ cmp(ebp, 0);
  __ j(equal, &skip, not_taken);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ bind(&skip);

  ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  __ ret(0);
}


// If true, a Handle<T> passed by value is passed and returned by
// using the location_ field directly. If false, it is passed and
// returned as a pointer to a handle.
#ifdef USING_BSD_ABI
static const bool kPassHandlesDirectly = true;
#else
static const bool kPassHandlesDirectly = false;
#endif


void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
  Label get_result;
  Label prologue;
  Label promote_scheduled_exception;
  __ EnterApiExitFrame(ExitFrame::MODE_NORMAL, kStackSpace, kArgc);
  ASSERT_EQ(kArgc, 4);
  if (kPassHandlesDirectly) {
    // When handles are passed directly we don't have to allocate extra
    // space for and pass an out parameter.
    __ mov(Operand(esp, 0 * kPointerSize), ebx);  // name.
    __ mov(Operand(esp, 1 * kPointerSize), eax);  // arguments pointer.
  } else {
    // The function expects three arguments to be passed but we allocate
    // four to get space for the output cell. The argument slots are filled
    // as follows:
    //
    //   3: output cell
    //   2: arguments pointer
    //   1: name
    //   0: pointer to the output cell
    //
    // Note that this is one more "argument" than the function expects
    // so the out cell will have to be popped explicitly after returning
    // from the function.
    __ mov(Operand(esp, 1 * kPointerSize), ebx);  // name.
    __ mov(Operand(esp, 2 * kPointerSize), eax);  // arguments pointer.
    __ mov(ebx, esp);
    __ add(Operand(ebx), Immediate(3 * kPointerSize));
    __ mov(Operand(esp, 0 * kPointerSize), ebx);  // output
    __ mov(Operand(esp, 3 * kPointerSize), Immediate(0));  // out cell.
  }
  // Call the api function!
  __ call(fun()->address(), RelocInfo::RUNTIME_ENTRY);
  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address();
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(Factory::the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception, not_taken);
  if (!kPassHandlesDirectly) {
    // The returned value is a pointer to the handle holding the result.
    // Dereference this to get to the location.
    __ mov(eax, Operand(eax, 0));
  }
  // Check if the result handle holds 0.
  __ test(eax, Operand(eax));
  __ j(not_zero, &get_result, taken);
  // It was zero; the result is undefined.
  __ mov(eax, Factory::undefined_value());
  __ jmp(&prologue);
  // It was non-zero. Dereference to get the result value.
  __ bind(&get_result);
  __ mov(eax, Operand(eax, 0));
  __ bind(&prologue);
  __ LeaveExitFrame(ExitFrame::MODE_NORMAL);
  __ ret(0);
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
}


void CEntryStub::GenerateCore(MacroAssembler* masm,
                              Label* throw_normal_exception,
                              Label* throw_termination_exception,
                              Label* throw_out_of_memory_exception,
                              bool do_gc,
                              bool always_allocate_scope,
                              int /* alignment_skew */) {
  // eax: result parameter for PerformGC, if any
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result_size_ is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  if (do_gc) {
    // Pass failure code returned from last attempt as first argument to
    // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
    // stack alignment is known to be correct. This function takes one argument
    // which is passed on the stack, and we know that the stack has been
    // prepared to pass at least one argument.
    __ mov(Operand(esp, 0 * kPointerSize), eax);  // Result.
    __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
  }

  ExternalReference scope_depth =
      ExternalReference::heap_always_allocate_scope_depth();
  if (always_allocate_scope) {
    __ inc(Operand::StaticVariable(scope_depth));
  }

  // Call C function.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ call(Operand(ebx));
  // Result is in eax or edx:eax - do not destroy these registers!

  if (always_allocate_scope) {
    __ dec(Operand::StaticVariable(scope_depth));
  }

  // Make sure we're not trying to return 'the hole' from the runtime
  // call as this may lead to crashes in the IC code later.
  if (FLAG_debug_code) {
    Label okay;
    __ cmp(eax, Factory::the_hole_value());
    __ j(not_equal, &okay);
    __ int3();
    __ bind(&okay);
  }

  // Check for failure result.
  Label failure_returned;
  ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
  __ lea(ecx, Operand(eax, 1));
  // Lower 2 bits of ecx are 0 iff eax has failure tag.
  __ test(ecx, Immediate(kFailureTagMask));
  __ j(zero, &failure_returned, not_taken);
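  // The lea/test pair relies on the failure tag being the all-ones pattern
  // in the low tag bits, which the ASSERT above pins down via
  // ((kFailureTag + 1) & kFailureTagMask) == 0: adding 1 to a failure-tagged
  // word clears exactly the tag bits, so a zero result of the masked test
  // identifies failures while leaving the original value intact in eax.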

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(mode_);
  __ ret(0);

  // Handling of failure.
  __ bind(&failure_returned);

  Label retry;
  // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
  ASSERT(Failure::RETRY_AFTER_GC == 0);
  __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
  __ j(zero, &retry, taken);
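  // The test extracts the failure type bits that sit just above the tag
  // bits. Since Failure::RETRY_AFTER_GC is type 0 (see the ASSERT), a zero
  // result means the failure carries no special type and the runtime call
  // should simply be retried after a garbage collection.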

  // Special handling of out of memory exceptions.
  __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
  __ j(equal, throw_out_of_memory_exception);

  // Retrieve the pending exception and clear the variable.
  ExternalReference pending_exception_address(Top::k_pending_exception_address);
  __ mov(eax, Operand::StaticVariable(pending_exception_address));
  __ mov(edx,
         Operand::StaticVariable(ExternalReference::the_hole_value_location()));
  __ mov(Operand::StaticVariable(pending_exception_address), edx);

  // Special handling of termination exceptions which are uncatchable
  // by javascript code.
  __ cmp(eax, Factory::termination_exception());
  __ j(equal, throw_termination_exception);

  // Handle normal exception.
  __ jmp(throw_normal_exception);

  // Retry.
  __ bind(&retry);
}


void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
                                          UncatchableExceptionType type) {
  // Adjust this code if not the case.
  ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);

  // Drop sp to the top stack handler.
  ExternalReference handler_address(Top::k_handler_address);
  __ mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the ENTRY handler is found.
  Label loop, done;
  __ bind(&loop);
  // Load the type of the current stack handler.
  const int kStateOffset = StackHandlerConstants::kStateOffset;
  __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
  __ j(equal, &done);
  // Fetch the next handler in the list.
  const int kNextOffset = StackHandlerConstants::kNextOffset;
  __ mov(esp, Operand(esp, kNextOffset));
  __ jmp(&loop);
  __ bind(&done);

  // Set the top handler address to next handler past the current ENTRY handler.
  ASSERT(StackHandlerConstants::kNextOffset == 0);
  __ pop(Operand::StaticVariable(handler_address));

  if (type == OUT_OF_MEMORY) {
    // Set external caught exception to false.
    ExternalReference external_caught(Top::k_external_caught_exception_address);
    __ mov(eax, false);
    __ mov(Operand::StaticVariable(external_caught), eax);

    // Set pending exception and eax to out of memory exception.
    ExternalReference pending_exception(Top::k_pending_exception_address);
    __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
    __ mov(Operand::StaticVariable(pending_exception), eax);
  }

  // Clear the context pointer.
  __ xor_(esi, Operand(esi));

  // Restore fp from handler and discard handler state.
  ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
  __ pop(ebp);
  __ pop(edx);  // State.

  ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
  __ ret(0);
}

void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)

  // NOTE: Invocations of builtins may return failure objects instead
  // of a proper result. The builtin entry handles this by performing
  // a garbage collection and retrying the builtin (twice).

  // Enter the exit frame that transitions from JavaScript to C++.
  __ EnterExitFrame(mode_);

  // eax: result parameter for PerformGC, if any (setup below)
  // ebx: pointer to builtin function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: argv pointer (C callee-saved)

  Label throw_normal_exception;
  Label throw_termination_exception;
  Label throw_out_of_memory_exception;

  // Call into the runtime system.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               false,
               false);

  // Do space-specific GC and retry runtime call.
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               false);

  // Do full GC and retry runtime call one final time.
  Failure* failure = Failure::InternalError();
  __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
  GenerateCore(masm,
               &throw_normal_exception,
               &throw_termination_exception,
               &throw_out_of_memory_exception,
               true,
               true);

  __ bind(&throw_out_of_memory_exception);
  GenerateThrowUncatchable(masm, OUT_OF_MEMORY);

  __ bind(&throw_termination_exception);
  GenerateThrowUncatchable(masm, TERMINATION);

  __ bind(&throw_normal_exception);
  GenerateThrowTOS(masm);
}


void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  Label invoke, exit;
#ifdef ENABLE_LOGGING_AND_PROFILING
  Label not_outermost_js, not_outermost_js_2;
#endif

  // Setup frame.
  __ push(ebp);
  __ mov(ebp, Operand(esp));

  // Push marker in two places.
  int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
  __ push(Immediate(Smi::FromInt(marker)));  // context slot
  __ push(Immediate(Smi::FromInt(marker)));  // function slot
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
  __ push(Operand::StaticVariable(c_entry_fp));

#ifdef ENABLE_LOGGING_AND_PROFILING
  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ bind(&not_outermost_js);
#endif

  // Call a faked try-block that does the invoke.
  __ call(&invoke);
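  // The call pushes the address of the next instruction and jumps to the
  // invoke label below, where a try-handler is linked into the handler
  // chain. In effect, the pushed return address becomes the resume point
  // used when an uncaught exception unwinds back to this entry frame, so
  // control then continues with the caught-exception code that follows.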

  // Caught exception: Store result (exception) in the pending
  // exception field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(Top::k_pending_exception_address);
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);

  // Clear any pending exceptions.
  __ mov(edx,
         Operand::StaticVariable(ExternalReference::the_hole_value_location()));
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Fake a receiver (NULL).
  __ push(Immediate(0));  // receiver

  // Invoke the function by calling through JS entry trampoline
  // builtin and pop the faked function when we return. Notice that we
  // cannot store a reference to the trampoline code directly in this
  // stub, because the builtin stubs may not have been generated yet.
  if (is_construct) {
    ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
    __ mov(edx, Immediate(construct_entry));
  } else {
    ExternalReference entry(Builtins::JSEntryTrampoline);
    __ mov(edx, Immediate(entry));
  }
  __ mov(edx, Operand(edx, 0));  // deref address
  __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
  __ call(Operand(edx));

  // Unlink this frame from the handler chain.
  __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
  // Pop next_sp.
  __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));

#ifdef ENABLE_LOGGING_AND_PROFILING
  // If current EBP value is the same as js_entry_sp value, it means that
  // the current function is the outermost.
  __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);
#endif

  // Restore the top frame descriptor from the stack.
  __ bind(&exit);
  __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(Operand(esp), Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}


void InstanceofStub::Generate(MacroAssembler* masm) {
  // Get the object - go slow case if it's a smi.
  Label slow;
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // 2 ~ return address, function
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &slow, not_taken);

  // Check that the left hand is a JS object.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));  // eax - object map
  __ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset));  // ecx - type
  __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow, not_taken);
  __ cmp(ecx, LAST_JS_OBJECT_TYPE);
  __ j(above, &slow, not_taken);

  // Get the prototype of the function.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // 1 ~ return address
  // edx is function, eax is map.

  // Look up the function and the map in the instanceof cache.
  Label miss;
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
  __ cmp(edx, Operand::StaticArray(ecx, times_pointer_size, roots_address));
  __ j(not_equal, &miss);
  __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
  __ cmp(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
  __ j(not_equal, &miss);
  __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
  __ mov(eax, Operand::StaticArray(ecx, times_pointer_size, roots_address));
  __ ret(2 * kPointerSize);

  __ bind(&miss);
  __ TryGetFunctionPrototype(edx, ebx, ecx, &slow);

  // Check that the function prototype is a JS object.
  __ test(ebx, Immediate(kSmiTagMask));
  __ j(zero, &slow, not_taken);
  __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow, not_taken);
  __ cmp(ecx, LAST_JS_OBJECT_TYPE);
  __ j(above, &slow, not_taken);

  // Register mapping:
  //   eax is object map.
  //   edx is function.
  //   ebx is function prototype.
  __ mov(ecx, Immediate(Heap::kInstanceofCacheMapRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
  __ mov(ecx, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), edx);

  __ mov(ecx, FieldOperand(eax, Map::kPrototypeOffset));

  // Loop through the prototype chain looking for the function prototype.
  Label loop, is_instance, is_not_instance;
  __ bind(&loop);
  __ cmp(ecx, Operand(ebx));
  __ j(equal, &is_instance);
  __ cmp(Operand(ecx), Immediate(Factory::null_value()));
  __ j(equal, &is_not_instance);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(ecx, FieldOperand(ecx, Map::kPrototypeOffset));
  __ jmp(&loop);

  __ bind(&is_instance);
  __ Set(eax, Immediate(0));
  __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
  __ ret(2 * kPointerSize);

  __ bind(&is_not_instance);
  __ Set(eax, Immediate(Smi::FromInt(1)));
  __ mov(ecx, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
  __ mov(Operand::StaticArray(ecx, times_pointer_size, roots_address), eax);
  __ ret(2 * kPointerSize);
12330
12331 // Slow-case: Go through the JavaScript implementation.
12332 __ bind(&slow);
12333 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
12334}
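
// For orientation, the fast path above corresponds roughly to the following
// C++ sketch (a sketch only; IsInstanceOf and the accessors are illustrative
// names, not V8's API):
//
//   bool IsInstanceOf(JSObject* object, JSFunction* function) {
//     Object* expected = function->prototype();
//     for (Object* p = object->map()->prototype(); !p->IsNull();
//          p = HeapObject::cast(p)->map()->prototype()) {
//       if (p == expected) return true;
//     }
//     return false;
//   }
//
// with a one-entry (function, map) -> answer cache, kept in the roots array,
// consulted before and updated after each full prototype chain walk.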


int CompareStub::MinorKey() {
  // Encode the three parameters in a unique 16 bit value. To avoid duplicate
  // stubs, the never-NaN-NaN condition is only taken into account if the
  // condition is equal.
  ASSERT(static_cast<unsigned>(cc_) < (1 << 13));
  return ConditionField::encode(static_cast<unsigned>(cc_))
         | StrictField::encode(strict_)
         | NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
         | IncludeNumberCompareField::encode(include_number_compare_);
}


// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";

  const char* cc_name;
  switch (cc_) {
    case less: cc_name = "LT"; break;
    case greater: cc_name = "GT"; break;
    case less_equal: cc_name = "LE"; break;
    case greater_equal: cc_name = "GE"; break;
    case equal: cc_name = "EQ"; break;
    case not_equal: cc_name = "NE"; break;
    default: cc_name = "UnknownCondition"; break;
  }

  const char* strict_name = "";
  if (strict_ && (cc_ == equal || cc_ == not_equal)) {
    strict_name = "_STRICT";
  }

  const char* never_nan_nan_name = "";
  if (never_nan_nan_ && (cc_ == equal || cc_ == not_equal)) {
    never_nan_nan_name = "_NO_NAN";
  }

  const char* include_number_compare_name = "";
  if (!include_number_compare_) {
    include_number_compare_name = "_NO_NUMBER";
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "CompareStub_%s%s%s%s",
               cc_name,
               strict_name,
               never_nan_nan_name,
               include_number_compare_name);
  return name_;
}


void StringHelper::GenerateFastCharCodeAt(MacroAssembler* masm,
                                          Register object,
                                          Register index,
                                          Register scratch,
                                          Register result,
                                          Label* receiver_not_string,
                                          Label* index_not_smi,
                                          Label* index_out_of_range,
                                          Label* slow_case) {
  Label not_a_flat_string;
  Label try_again_with_new_string;
  Label ascii_string;
  Label got_char_code;

  // If the receiver is a smi trigger the non-string case.
  ASSERT(kSmiTag == 0);
  __ test(object, Immediate(kSmiTagMask));
  __ j(zero, receiver_not_string);

  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
  // If the receiver is not a string trigger the non-string case.
  __ test(result, Immediate(kIsNotStringMask));
  __ j(not_zero, receiver_not_string);

  // If the index is non-smi trigger the non-smi case.
  ASSERT(kSmiTag == 0);
  __ test(index, Immediate(kSmiTagMask));
  __ j(not_zero, index_not_smi);

  // Check for index out of range.
  __ cmp(index, FieldOperand(object, String::kLengthOffset));
  __ j(above_equal, index_out_of_range);

  __ bind(&try_again_with_new_string);
  // ----------- S t a t e -------------
  //  -- object  : string to access
  //  -- result  : instance type of the string
  //  -- scratch : non-negative index < length
  // -----------------------------------

  // We need special handling for non-flat strings.
  ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(not_zero, &not_a_flat_string);

  // Check for 1-byte or 2-byte string.
  ASSERT(kAsciiStringTag != 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &ascii_string);

  // 2-byte string.
  // Load the 2-byte character code into the result register.
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);  // The smi index is already
                                             // scaled by 2, the size of a
                                             // two-byte character.
  __ movzx_w(result, FieldOperand(object,
                                  index, times_1,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&got_char_code);

  // Handle non-flat strings.
  __ bind(&not_a_flat_string);
  __ and_(result, kStringRepresentationMask);
  __ cmp(result, kConsStringTag);
  __ j(not_equal, slow_case);

  // ConsString.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ mov(result, FieldOperand(object, ConsString::kSecondOffset));
  __ cmp(Operand(result), Factory::empty_string());
  __ j(not_equal, slow_case);
  // Get the first of the two strings and load its instance type.
  __ mov(object, FieldOperand(object, ConsString::kFirstOffset));
  __ mov(result, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
  __ jmp(&try_again_with_new_string);

  // ASCII string.
  __ bind(&ascii_string);
  // Put untagged index into scratch register.
  __ mov(scratch, index);
  __ SmiUntag(scratch);

  // Load the byte into the result register.
  __ movzx_b(result, FieldOperand(object,
                                  scratch, times_1,
                                  SeqAsciiString::kHeaderSize));
  __ bind(&got_char_code);
  __ SmiTag(result);
}
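
// A rough C++ sketch of the fast path above (the helper and accessors are
// hypothetical, for orientation only): unwrap degenerate cons strings, then
// index the sequential backing store.
//
//   uint16_t FastCharCodeAt(String* str, int index) {
//     while (str->IsConsString() && SecondIsEmpty(str)) {
//       str = First(str);  // Degenerate cons: contents live in first part.
//     }
//     return str->IsAsciiString() ? AsciiChars(str)[index]
//                                 : TwoByteChars(str)[index];
//   }
//
// Anything else (a real cons string, external strings) hits slow_case.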


void StringHelper::GenerateCharFromCode(MacroAssembler* masm,
                                        Register code,
                                        Register result,
                                        InvokeFlag flag) {
  ASSERT(!code.is(result));

  Label slow_case;
  Label exit;

  // Fast case of Heap::LookupSingleCharacterStringFromCode.
  ASSERT(kSmiTag == 0);
  ASSERT(kSmiShiftSize == 0);
  ASSERT(IsPowerOf2(String::kMaxAsciiCharCode + 1));
  __ test(code,
          Immediate(kSmiTagMask |
                    ((~String::kMaxAsciiCharCode) << kSmiTagSize)));
  __ j(not_zero, &slow_case, not_taken);

  __ Set(result, Immediate(Factory::single_character_string_cache()));
  ASSERT(kSmiTag == 0);
  ASSERT(kSmiTagSize == 1);
  ASSERT(kSmiShiftSize == 0);
  // At this point code register contains smi tagged ascii char code.
  __ mov(result, FieldOperand(result,
                              code, times_half_pointer_size,
                              FixedArray::kHeaderSize));
  __ cmp(result, Factory::undefined_value());
  __ j(equal, &slow_case, not_taken);
  __ jmp(&exit);

  __ bind(&slow_case);
  if (flag == CALL_FUNCTION) {
    __ push(code);
    __ CallRuntime(Runtime::kCharFromCode, 1);
    if (!result.is(eax)) {
      __ mov(result, eax);
    }
  } else {
    ASSERT(flag == JUMP_FUNCTION);
    ASSERT(result.is(eax));
    __ pop(eax);  // Save return address.
    __ push(code);
    __ push(eax);  // Restore return address.
    __ TailCallRuntime(Runtime::kCharFromCode, 1, 1);
  }

  __ bind(&exit);
  if (flag == JUMP_FUNCTION) {
    ASSERT(result.is(eax));
    __ ret(0);
  }
}
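
// In essence (a sketch, not V8's actual helper; the cache is a FixedArray of
// pre-allocated one character strings indexed by character code):
//
//   Object* CharFromCode(uint32_t code) {
//     if (code <= String::kMaxAsciiCharCode) {
//       Object* entry = single_character_string_cache[code];
//       if (entry != undefined_value) return entry;  // Fast path.
//     }
//     return Runtime_CharFromCode(code);             // Slow path.
//   }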


void StringAddStub::Generate(MacroAssembler* masm) {
  Label string_add_runtime;

  // Load the two arguments.
  __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
  __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.

  // Make sure that both arguments are strings if not known in advance.
  if (string_check_) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);

    // First argument is a string, test second.
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &string_add_runtime);
    __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
    __ j(above_equal, &string_add_runtime);
  }

  // Both arguments are strings.
  // eax: first string
  // edx: second string
  // Check if either of the strings is empty. In that case return the other.
  Label second_not_zero_length, both_not_zero_length;
  __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
  ASSERT(kSmiTag == 0);
  __ test(ecx, Operand(ecx));
  __ j(not_zero, &second_not_zero_length);
  // Second string is empty, result is first string which is already in eax.
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&second_not_zero_length);
  __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
  ASSERT(kSmiTag == 0);
  __ test(ebx, Operand(ebx));
  __ j(not_zero, &both_not_zero_length);
  // First string is empty, result is second string which is in edx.
  __ mov(eax, edx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Both strings are non-empty.
  // eax: first string
  // ebx: length of first string as a smi
  // ecx: length of second string as a smi
  // edx: second string
  // Look at the length of the result of adding the two strings.
  Label string_add_flat_result, longer_than_two;
  __ bind(&both_not_zero_length);
  __ add(ebx, Operand(ecx));
  ASSERT(Smi::kMaxValue == String::kMaxLength);
  // Handle exceptionally long strings in the runtime system.
  __ j(overflow, &string_add_runtime);
  // Adding two one character strings is handled specially: the combined two
  // character string is first looked up in the symbol table.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
  __ j(not_equal, &longer_than_two);

  // Check that both strings are non-external ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
                                         &string_add_runtime);

  // Get the two characters forming the new string.
  __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));

  // Try to lookup two character string in symbol table. If it is not found
  // just allocate a new one.
  Label make_two_character_string, make_flat_ascii_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&make_two_character_string);
  __ Set(ebx, Immediate(Smi::FromInt(2)));
  __ jmp(&make_flat_ascii_string);

  __ bind(&longer_than_two);
  // Check if resulting string will be flat.
  __ cmp(Operand(ebx), Immediate(Smi::FromInt(String::kMinNonFlatLength)));
  __ j(below, &string_add_flat_result);

  // If result is not supposed to be flat allocate a cons string object. If
  // both strings are ascii the result is an ascii cons string.
  Label non_ascii, allocated;
  __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
  __ and_(ecx, Operand(edi));
  ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii);
  // Allocate an ascii cons string.
  __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
  __ bind(&allocated);
  // Fill the fields of the cons string.
  if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
  __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
  __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
         Immediate(String::kEmptyHashField));
  __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
  __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
  __ mov(eax, ecx);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);
  __ bind(&non_ascii);
  // Allocate a two byte cons string.
  __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
  __ jmp(&allocated);

  // Handle creating a flat result. First check that both strings are not
  // external strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&string_add_flat_result);
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kStringRepresentationMask);
  __ cmp(ecx, kExternalStringTag);
  __ j(equal, &string_add_runtime);
  // Now check if both strings are ascii strings.
  // eax: first string
  // ebx: length of resulting flat string as a smi
  // edx: second string
  Label non_ascii_string_add_flat_result;
  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  ASSERT(kStringEncodingMask == kAsciiStringTag);
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ test(ecx, Immediate(kAsciiStringTag));
  __ j(zero, &string_add_runtime);

  __ bind(&make_flat_ascii_string);
  // Both strings are ascii strings. As they are short they are both flat.
  // ebx: length of resulting flat string as a smi
  __ SmiUntag(ebx);
  __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // eax: first string - known to be two byte
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&non_ascii_string_add_flat_result);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
  __ and_(ecx, kAsciiStringTag);
  __ j(not_zero, &string_add_runtime);
  // Both strings are two byte strings. As they are short they are both
  // flat.
  __ SmiUntag(ebx);
  __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(Operand(ecx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load first argument and locate first character.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  // Load second argument and locate first character.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  __ IncrementCounter(&Counters::string_add_native, 1);
  __ ret(2 * kPointerSize);

  // Just jump to runtime to add the two strings.
  __ bind(&string_add_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
}
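
// The overall strategy of the stub above, as a sketch (helper names are
// illustrative, not V8's):
//
//   String* Add(String* a, String* b) {
//     if (a->length() == 0) return b;
//     if (b->length() == 0) return a;
//     int length = a->length() + b->length();
//     if (length == 2) return LookupOrMakeTwoCharacterString(a, b);
//     if (length < String::kMinNonFlatLength)
//       return AllocateFlatAndCopy(a, b);  // Short result: eager flat copy.
//     return AllocateConsString(a, b);     // Long result: lazy concatenation.
//   }
//
// Non-strings, overlong results and external strings all fall through to
// Runtime::kStringAdd.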


void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for
  // very short strings.
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(1));
    __ add(Operand(dest), Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(Operand(src), Immediate(2));
    __ add(Operand(dest), Immediate(2));
  }
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);
}


void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords, then copy any remaining
  // bytes one at a time.
  ASSERT(dest.is(edi));   // rep movs destination
  ASSERT(src.is(esi));    // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are fewer than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes);

  // Copy from esi to edi using the rep movs instruction.
  __ mov(scratch, count);
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, Operand(count));
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(Operand(src), Immediate(1));
  __ add(Operand(dest), Immediate(1));
  __ sub(Operand(count), Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}
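
// A C++ sketch of the copy strategy implemented above (illustrative only):
// move whole doublewords with a single rep movs, then finish the zero to
// three trailing bytes one at a time.
//
//   static void CopyBytes(uint8_t* dest, const uint8_t* src, size_t count) {
//     size_t dwords = count >> 2;              // What rep movs handles.
//     memcpy(dest, src, dwords * 4);           // Stand-in for rep movs.
//     for (size_t i = dwords * 4; i < count; i++) {
//       dest[i] = src[i];                      // Trailing 0-3 bytes.
//     }
//   }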


void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the symbol table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index);
  __ mov(scratch, c2);
  __ sub(Operand(scratch), Immediate(static_cast<int>('0')));
  __ cmp(Operand(scratch), Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_found);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, kBitsPerByte);
  __ or_(chars, Operand(c2));

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash:  hash of two character string.

  // Load the symbol table.
  Register symbol_table = c2;
  ExternalReference roots_address = ExternalReference::roots_address();
  __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
  __ mov(symbol_table,
         Operand::StaticArray(scratch, times_pointer_size, roots_address));

  // Calculate capacity mask from the symbol table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(Operand(mask), Immediate(1));

  // Registers
  // chars:        two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash:         hash of two character string
  // symbol_table: symbol table
  // mask:         capacity mask
  // scratch:      -

  // Perform a number of probes in the symbol table.
  static const int kProbes = 4;
  Label found_in_symbol_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in symbol table.
    __ mov(scratch, hash);
    if (i > 0) {
      __ add(Operand(scratch), Immediate(SymbolTable::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(mask));

    // Load the entry from the symbol table.
    Register candidate = scratch;  // Scratch register contains candidate.
    ASSERT_EQ(1, SymbolTable::kEntrySize);
    __ mov(candidate,
           FieldOperand(symbol_table,
                        scratch,
                        times_pointer_size,
                        SymbolTable::kElementsStartOffset));

    // If entry is undefined no string with this hash can be found.
    __ cmp(candidate, Factory::undefined_value());
    __ j(equal, not_found);

    // If length is not 2 the string is not a candidate.
    __ cmp(FieldOperand(candidate, String::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &next_probe[i]);

    // As we are out of registers save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ascii string.
    __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize));
    __ and_(temp, 0x0000ffff);
    __ cmp(chars, Operand(temp));
    __ j(equal, &found_in_symbol_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = scratch;
  __ bind(&found_in_symbol_table);
  __ pop(mask);  // Pop temporarily saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}
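
// The probing scheme above, sketched in C++ (names other than SymbolTable's
// are illustrative; NULL stands in for the jump to not_found):
//
//   Object* ProbeForTwoCharString(uint32_t hash, uint16_t chars) {
//     uint32_t mask = table->Capacity() - 1;
//     for (int i = 0; i < kProbes; i++) {
//       uint32_t entry = (hash + SymbolTable::GetProbeOffset(i)) & mask;
//       Object* candidate = table->KeyAt(entry);
//       if (candidate->IsUndefined()) return NULL;  // Hash not present.
//       if (IsSequentialAsciiOfLengthTwo(candidate) &&
//           FirstTwoChars(candidate) == chars) {
//         return candidate;
//       }
//     }
//     return NULL;  // Give up after kProbes attempts.
//   }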


void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = character + (character << 10);
  __ mov(hash, character);
  __ shl(hash, 10);
  __ add(hash, Operand(character));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, Operand(character));
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ sar(scratch, 6);
  __ xor_(hash, Operand(scratch));
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, Operand(scratch));
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ sar(scratch, 11);
  __ xor_(hash, Operand(scratch));
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, Operand(scratch));

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ test(hash, Operand(hash));
  __ j(not_zero, &hash_not_zero);
  __ mov(hash, Immediate(27));
  __ bind(&hash_not_zero);
}
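
// Taken together, GenerateHashInit, GenerateHashAddCharacter and
// GenerateHashGetHash emit a Jenkins-style one-at-a-time hash. A plain C++
// sketch of the same computation (hypothetical helper; uint32_t shifts match
// the emitted code for the small character values hashed here):
//
//   static uint32_t HashChars(const uint8_t* chars, int length) {
//     uint32_t hash = 0;
//     for (int i = 0; i < length; i++) {
//       hash += chars[i];
//       hash += hash << 10;
//       hash ^= hash >> 6;
//     }
//     hash += hash << 3;
//     hash ^= hash >> 11;
//     hash += hash << 15;
//     if (hash == 0) hash = 27;  // Any nonzero value; 0 is never returned.
//     return hash;
//   }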


void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: to
  //  esp[8]: from
  //  esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  ASSERT_EQ(0, kSmiTag);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type
  // Calculate length of sub string using the smi values.
  Label result_longer_than_two;
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(not_zero, &runtime);
  __ sub(ecx, Operand(edx));
  // Special handling of sub-strings of length 1 and 2. One character strings
  // are handled in the runtime system (looked up in the single character
  // cache). Two character strings are looked for in the symbol cache.
  __ SmiUntag(ecx);  // Result length is no longer smi.
  __ cmp(ecx, 2);
  __ j(greater, &result_longer_than_two);
  __ j(less, &runtime);

  // Sub string of length 2 requested.
  // eax: string
  // ebx: instance type
  // ecx: sub string length (value is 2)
  // edx: from index (smi)
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);

  // Get the two characters forming the sub string.
  __ SmiUntag(edx);  // From index is no longer smi.
  __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
  __ movzx_b(ecx,
             FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));

  // Try to lookup two character string in symbol table.
  Label make_two_character_string;
  StringHelper::GenerateTwoCharacterSymbolTableProbe(
      masm, ebx, ecx, eax, edx, edi, &make_two_character_string);
  __ ret(3 * kPointerSize);

  __ bind(&make_two_character_string);
  // Setup registers for allocating the two character string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ Set(ecx, Immediate(2));

  __ bind(&result_longer_than_two);
  // eax: string
  // ebx: instance type
  // ecx: result string length
  // Check for flat ascii string.
  Label non_ascii_flat;
  __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);

  // Allocate the result.
  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  __ SmiUntag(ebx);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  __ bind(&non_ascii_flat);
  // eax: string
  // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
  // ecx: result string length
  // Check for flat two byte string.
  __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
  __ j(not_equal, &runtime);

  // Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(Operand(edi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ mov(esi, Operand(esp, 3 * kPointerSize));
  __ add(Operand(esi),
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));  // from
  // As "from" is a smi it is 2 times the index value, which matches the
  // size of a two byte character.
  ASSERT_EQ(0, kSmiTag);
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ add(esi, Operand(ebx));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(&Counters::sub_string_native, 1);
  __ ret(3 * kPointerSize);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);
}


void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Label result_not_equal;
  Label result_greater;
  Label compare_lengths;

  __ IncrementCounter(&Counters::string_compare_native, 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, Operand(length_delta));
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  __ test(min_length, Operand(min_length));
  __ j(zero, &compare_lengths);

  // Change index to run from -min_length to -1 by adding min_length
  // to string start. This means that loop ends when index reaches zero,
  // which doesn't need an additional compare.
  __ SmiUntag(min_length);
  __ lea(left,
         FieldOperand(left,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ lea(right,
         FieldOperand(right,
                      min_length, times_1,
                      SeqAsciiString::kHeaderSize));
  __ neg(min_length);

  Register index = min_length;  // index = -min_length;

  {
    // Compare loop.
    Label loop;
    __ bind(&loop);
    // Compare characters.
    __ mov_b(scratch2, Operand(left, index, times_1, 0));
    __ cmpb(scratch2, Operand(right, index, times_1, 0));
    __ j(not_equal, &result_not_equal);
    __ add(Operand(index), Immediate(1));
    __ j(not_zero, &loop);
  }

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, Operand(length_delta));
  __ j(not_zero, &result_not_equal);

  // Result is EQUAL.
  ASSERT_EQ(0, EQUAL);
  ASSERT_EQ(0, kSmiTag);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(2 * kPointerSize);

  __ bind(&result_not_equal);
  __ j(greater, &result_greater);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(2 * kPointerSize);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(2 * kPointerSize);
}
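
// Restated as a C++ sketch (illustrative only). Note the negative-index
// trick mirrored from the assembly: the loop counter doubles as the
// termination test, saving a compare per character.
//
//   static int CompareFlatAscii(const char* a, int la,
//                               const char* b, int lb) {
//     int min_length = la < lb ? la : lb;
//     const char* a_end = a + min_length;
//     const char* b_end = b + min_length;
//     for (int i = -min_length; i != 0; i++) {
//       if (a_end[i] != b_end[i]) {
//         return a_end[i] < b_end[i] ? LESS : GREATER;
//       }
//     }
//     return la == lb ? EQUAL : (la < lb ? LESS : GREATER);
//   }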


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: right string
  //  esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  Label not_same;
  __ cmp(edx, Operand(eax));
  __ j(not_equal, &not_same);
  ASSERT_EQ(0, EQUAL);
  ASSERT_EQ(0, kSmiTag);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(&Counters::string_compare_native, 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ascii strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ascii strings.
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32