// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

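// Editorial note on the encoding used by SaveRegisters()/RestoreRegisters()
// below (inferred from the code, not taken from the original sources): each
// registers_[i] entry appears to be either kPush, kIgnore, or an ebp-relative
// frame offset, with the kSyncedFlag bit marking values that are already in
// memory; synced entries are skipped on save but still reloaded on restore.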
void DeferredCode::SaveRegisters() {
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int action = registers_[i];
    if (action == kPush) {
      __ push(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore && (action & kSyncedFlag) == 0) {
      __ mov(Operand(ebp, action), RegisterAllocator::ToRegister(i));
    }
  }
}


void DeferredCode::RestoreRegisters() {
  // Restore registers in reverse order due to the stack.
  for (int i = RegisterAllocator::kNumRegisters - 1; i >= 0; i--) {
    int action = registers_[i];
    if (action == kPush) {
      __ pop(RegisterAllocator::ToRegister(i));
    } else if (action != kIgnore) {
      action &= ~kSyncedFlag;
      __ mov(RegisterAllocator::ToRegister(i), Operand(ebp, action));
    }
  }
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
}


CodeGenState::CodeGenState(CodeGenerator* owner,
                           ControlDestination* destination)
    : owner_(owner),
      destination_(destination),
      previous_(owner->state()) {
  owner_->set_state(this);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


// -------------------------------------------------------------------------
// CodeGenerator implementation

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      state_(NULL),
      loop_nesting_(0),
      function_return_is_shadowed_(false),
      in_spilled_code_(false) {
}


Scope* CodeGenerator::scope() { return info_->function()->scope(); }


// Calling conventions:
// ebp: caller's frame pointer
// esp: stack pointer
// edi: called JS function
// esi: callee's context

void CodeGenerator::Generate(CompilationInfo* info, Mode mode) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());

  // Initialize state.
  info_ = info;
  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  set_in_spilled_code(false);

  // Adjust for function-level loop nesting.
  loop_nesting_ += info->loop_nesting();

  JumpTarget::set_compiling_deferred_code(false);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    frame_->SpillAll();
    __ int3();
  }
#endif

  // New scope to get automatic timing calculation.
  {  // NOLINT
    HistogramTimerScope codegen_timer(&Counters::code_generation);
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments, return address.
    // ebp: caller's frame pointer
    // esp: stack pointer
    // edi: called JS function
    // esi: callee's context
    allocator_->Initialize();

    if (mode == PRIMARY) {
      frame_->Enter();

      // Allocate space for locals and initialize them.
      frame_->AllocateStackSlots();

      // Allocate the local context if needed.
      int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
      if (heap_slots > 0) {
        Comment cmnt(masm_, "[ allocate local context");
        // Allocate local context.
        // Get outer context and create a new context based on it.
        frame_->PushFunction();
        Result context;
        if (heap_slots <= FastNewContextStub::kMaximumSlots) {
          FastNewContextStub stub(heap_slots);
          context = frame_->CallStub(&stub, 1);
        } else {
          context = frame_->CallRuntime(Runtime::kNewContext, 1);
        }

        // Update context local.
        frame_->SaveContextRegister();

        // Verify that the runtime call result and esi agree.
        if (FLAG_debug_code) {
          __ cmp(context.reg(), Operand(esi));
          __ Assert(equal, "Runtime::NewContext should end up in esi");
        }
      }

      // TODO(1241774): Improve this code:
      // 1) only needed if we have a context
      // 2) no need to recompute context ptr every single time
      // 3) don't copy parameter operand code from SlotOperand!
      {
        Comment cmnt2(masm_, "[ copy context parameters into .context");
        // Note that iteration order is relevant here! If we have the same
        // parameter twice (e.g., function (x, y, x)), and that parameter
        // needs to be copied into the context, it must be the last argument
        // passed to the parameter that needs to be copied. This is a rare
        // case so we don't check for it, instead we rely on the copying
        // order: such a parameter is copied repeatedly into the same
        // context location and thus the last value is what is seen inside
        // the function.
        for (int i = 0; i < scope()->num_parameters(); i++) {
          Variable* par = scope()->parameter(i);
          Slot* slot = par->slot();
          if (slot != NULL && slot->type() == Slot::CONTEXT) {
            // The use of SlotOperand below is safe in unspilled code
            // because the slot is guaranteed to be a context slot.
            //
            // There are no parameters in the global scope.
            ASSERT(!scope()->is_global_scope());
            frame_->PushParameterAt(i);
            Result value = frame_->Pop();
            value.ToRegister();

            // SlotOperand loads context.reg() with the context object
            // stored to, used below in RecordWrite.
            Result context = allocator_->Allocate();
            ASSERT(context.is_valid());
            __ mov(SlotOperand(slot, context.reg()), value.reg());
            int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
            Result scratch = allocator_->Allocate();
            ASSERT(scratch.is_valid());
            frame_->Spill(context.reg());
            frame_->Spill(value.reg());
            __ RecordWrite(context.reg(), offset, value.reg(), scratch.reg());
          }
        }
      }

      // Store the arguments object.  This must happen after context
      // initialization because the arguments object may be stored in
      // the context.
      if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
        StoreArgumentsObject(true);
      }

      // Initialize ThisFunction reference if present.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        frame_->Push(Factory::the_hole_value());
        StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
      }
    } else {
      // When used as the secondary compiler for splitting, ebp, esi,
      // and edi have been pushed on the stack.  Adjust the virtual
      // frame to match this state.
      frame_->Adjust(3);
      allocator_->Unuse(edi);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }
    CheckStack();

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());

      // Handle the return from the function.
      if (has_valid_frame()) {
        // If there is a valid frame, control flow can fall off the end of
        // the body.  In that case there is an implicit return statement.
        ASSERT(!function_return_is_shadowed_);
        CodeForReturnPosition(info->function());
        frame_->PrepareForReturn();
        Result undefined(Factory::undefined_value());
        if (function_return_.is_bound()) {
          function_return_.Jump(&undefined);
        } else {
          function_return_.Bind(&undefined);
          GenerateReturnSequence(&undefined);
        }
      } else if (function_return_.is_linked()) {
        // If the return target has dangling jumps to it, then we have not
        // yet generated the return sequence.  This can happen when (a)
        // control does not flow off the end of the body so we did not
        // compile an artificial return statement just above, and (b) there
        // are return statements in the body but (c) they are all shadowed.
        Result return_value;
        function_return_.Bind(&return_value);
        GenerateReturnSequence(&return_value);
      }
    }
  }

  // Adjust for function-level loop nesting.
  loop_nesting_ -= info->loop_nesting();

  // Code generation state must be reset.
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    HistogramTimerScope deferred_timer(&Counters::deferred_code_generation);
    JumpTarget::set_compiling_deferred_code(true);
    ProcessDeferred();
    JumpTarget::set_compiling_deferred_code(false);
  }

  // There is no need to delete the register allocator, it is a
  // stack-allocated local.
  allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(esi));  // do not overwrite context register
      Register context = esi;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ mov(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ mov(tmp, FieldOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context.  However it is safe to dereference be-
      // cause the function context of a function context is itself.  Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ mov(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return Operand(eax);
  }
}


Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot,
                                                         Result tmp,
                                                         JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  ASSERT(tmp.is_register());
  Register context = esi;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        slow->Branch(not_equal, not_taken);
      }
      __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
      context = tmp.reg();
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  slow->Branch(not_equal, not_taken);
  __ mov(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp.reg(), slot->index());
}


// Emit code to load the value of an expression to the top of the
// frame.  If the expression is boolean-valued it may be compiled (or
// partially compiled) into control flow to the control destination.
// If force_control is true, control flow is forced.
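// For example (illustrative only, not from the original sources): when the
// condition of "if (a < b)" is compiled through LoadCondition, the comparison
// can branch directly to the destination's true and false targets, so no
// boolean value needs to be materialized on the frame; passing
// force_control == true covers callers that require a branch even if the
// expression produced a plain value.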
void CodeGenerator::LoadCondition(Expression* x,
                                  ControlDestination* dest,
                                  bool force_control) {
  ASSERT(!in_spilled_code());
  int original_height = frame_->height();

  { CodeGenState new_state(this, dest);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        !dest->is_used() &&
        frame_->height() == original_height) {
      dest->Goto(true);
    }
  }

  if (force_control && !dest->is_used()) {
    // Convert the TOS value into flow to the control destination.
    ToBoolean(dest);
  }

  ASSERT(!(force_control && !dest->is_used()));
  ASSERT(dest->is_used() || frame_->height() == original_height + 1);
}


void CodeGenerator::LoadAndSpill(Expression* expression) {
  ASSERT(in_spilled_code());
  set_in_spilled_code(false);
  Load(expression);
  frame_->SpillAll();
  set_in_spilled_code(true);
}


void CodeGenerator::Load(Expression* expr) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  ASSERT(!in_spilled_code());
  JumpTarget true_target;
  JumpTarget false_target;
  ControlDestination dest(&true_target, &false_target, true);
  LoadCondition(expr, &dest, false);

  if (dest.false_was_fall_through()) {
    // The false target was just bound.
    JumpTarget loaded;
    frame_->Push(Factory::false_value());
    // There may be dangling jumps to the true target.
    if (true_target.is_linked()) {
      loaded.Jump();
      true_target.Bind();
      frame_->Push(Factory::true_value());
      loaded.Bind();
    }

  } else if (dest.is_used()) {
    // There is true, and possibly false, control flow (with true as
    // the fall through).
    JumpTarget loaded;
    frame_->Push(Factory::true_value());
    if (false_target.is_linked()) {
      loaded.Jump();
      false_target.Bind();
      frame_->Push(Factory::false_value());
      loaded.Bind();
    }

  } else {
    // We have a valid value on top of the frame, but we still may
    // have dangling jumps to the true and false targets from nested
    // subexpressions (eg, the left subexpressions of the
    // short-circuited boolean operators).
    ASSERT(has_valid_frame());
    if (true_target.is_linked() || false_target.is_linked()) {
      JumpTarget loaded;
      loaded.Jump();  // Don't lose the current TOS.
      if (true_target.is_linked()) {
        true_target.Bind();
        frame_->Push(Factory::true_value());
        if (false_target.is_linked()) {
          loaded.Jump();
        }
      }
      if (false_target.is_linked()) {
        false_target.Bind();
        frame_->Push(Factory::false_value());
      }
      loaded.Bind();
    }
  }

  ASSERT(has_valid_frame());
  ASSERT(frame_->height() == original_height + 1);
}


void CodeGenerator::LoadGlobal() {
  if (in_spilled_code()) {
    frame_->EmitPush(GlobalObject());
  } else {
    Result temp = allocator_->Allocate();
    __ mov(temp.reg(), GlobalObject());
    frame_->Push(&temp);
  }
}


void CodeGenerator::LoadGlobalReceiver() {
  Result temp = allocator_->Allocate();
  Register reg = temp.reg();
  __ mov(reg, GlobalObject());
  __ mov(reg, FieldOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->Push(&temp);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->slot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


Result CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->Push(Factory::the_hole_value());
  } else {
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    frame_->PushFunction();
    frame_->PushReceiverSlotAddress();
    frame_->Push(Smi::FromInt(scope()->num_parameters()));
    Result result = frame_->CallStub(&stub, 3);
    frame_->Push(&result);
  }

  Variable* arguments = scope()->arguments()->var();
  Variable* shadow = scope()->arguments_shadow()->var();
  ASSERT(arguments != NULL && arguments->slot() != NULL);
  ASSERT(shadow != NULL && shadow->slot() != NULL);
  JumpTarget done;
  bool skip_arguments = false;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has already
    // been written to.  This can happen if a function has a local
    // variable named 'arguments'.
    LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
    Result probe = frame_->Pop();
    if (probe.is_constant()) {
      // We have to skip updating the arguments object if it has
      // been assigned a proper value.
      skip_arguments = !probe.handle()->IsTheHole();
    } else {
      __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
      probe.Unuse();
      done.Branch(not_equal);
    }
  }
  if (!skip_arguments) {
    StoreToSlot(arguments->slot(), NOT_CONST_INIT);
    if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  }
  StoreToSlot(shadow->slot(), NOT_CONST_INIT);
  return frame_->Pop();
}

//------------------------------------------------------------------------------
// CodeGenerator implementation of variables, lookups, and stores.

Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  // References are loaded from both spilled and unspilled code.  Set the
  // state to unspilled to allow that (and explicitly spill after
  // construction at the construction sites).
  bool was_in_spilled_code = in_spilled_code_;
  in_spilled_code_ = false;

  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->slot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }

  in_spilled_code_ = was_in_spilled_code;
}


void CodeGenerator::UnloadReference(Reference* ref) {
  // Pop a reference from the stack while preserving TOS.
  Comment cmnt(masm_, "[ UnloadReference");
  frame_->Nip(ref->size());
  ref->set_unloaded();
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
// convert it to a boolean in the condition code register or jump to
// 'false_target'/'true_target' as appropriate.
void CodeGenerator::ToBoolean(ControlDestination* dest) {
  Comment cmnt(masm_, "[ ToBoolean");

  // The value to convert should be popped from the frame.
  Result value = frame_->Pop();
  value.ToRegister();
  // Fast case checks.

  // 'false' => false.
  __ cmp(value.reg(), Factory::false_value());
  dest->false_target()->Branch(equal);

  // 'true' => true.
  __ cmp(value.reg(), Factory::true_value());
  dest->true_target()->Branch(equal);

  // 'undefined' => false.
  __ cmp(value.reg(), Factory::undefined_value());
  dest->false_target()->Branch(equal);

  // Smi => false iff zero.
  ASSERT(kSmiTag == 0);
  __ test(value.reg(), Operand(value.reg()));
  dest->false_target()->Branch(zero);
  __ test(value.reg(), Immediate(kSmiTagMask));
  dest->true_target()->Branch(zero);
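  // Worked example of the two smi tests above (editorial note): with
  // kSmiTag == 0 a smi is its integer value shifted left by one, so smi 0 is
  // the word 0 and the first test branches to false, while e.g. smi 3 is the
  // word 6, whose low tag bit is clear, so the second test branches to true.
  // Heap object pointers have the low bit set and fall through to the stub.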

  // Call the stub for all other cases.
  frame_->Push(&value);  // Undo the Pop() from above.
  ToBooleanStub stub;
  Result temp = frame_->CallStub(&stub, 1);
  // Convert the result to a condition code.
  __ test(temp.reg(), Operand(temp.reg()));
  temp.Unuse();
  dest->Split(not_equal);
}


class FloatingPointHelper : public AllStatic {
 public:

  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);
  // Code pattern for loading floating point values. Input values must
  // be either smi or heap number objects (fp values). Requirements:
  // operand_1 on TOS+1 or in edx, operand_2 on TOS+2 or in eax.
  // Returns operands as floating point numbers on FPU stack.
  static void LoadFloatOperands(MacroAssembler* masm,
                                Register scratch,
                                ArgLocation arg_location = ARGS_ON_STACK);

  // Similar to LoadFloatOperand but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadFloatSmis(MacroAssembler* masm, Register scratch);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);
  // Takes the operands in edx and eax and loads them as integers in eax
  // and ecx.
  static void LoadAsIntegers(MacroAssembler* masm,
                             bool use_sse3,
                             Label* operand_conversion_failure);
  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are.  Jump to label not_numbers if
  // either operand is not a number.  Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);

  // Similar to LoadSSE2Operands but assumes that both operands are smis.
  // Expects operands in edx, eax.
  static void LoadSSE2Smis(MacroAssembler* masm, Register scratch);
};


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int kMaxNameLength = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
               "GenericBinaryOpStub_%s_%s%s_%s%s",
               op_name,
               overwrite_name,
               (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
               args_in_registers_ ? "RegArgs" : "StackArgs",
               args_reversed_ ? "_R" : "");
  return name_;
}


// Call the specialized stub for a binary operation.
class DeferredInlineBinaryOperation: public DeferredCode {
 public:
  DeferredInlineBinaryOperation(Token::Value op,
                                Register dst,
                                Register left,
                                Register right,
                                OverwriteMode mode)
      : op_(op), dst_(dst), left_(left), right_(right), mode_(mode) {
    set_comment("[ DeferredInlineBinaryOperation");
  }

  virtual void Generate();

 private:
  Token::Value op_;
  Register dst_;
  Register left_;
  Register right_;
  OverwriteMode mode_;
};


void DeferredInlineBinaryOperation::Generate() {
  Label done;
  if (CpuFeatures::IsSupported(SSE2) && ((op_ == Token::ADD) ||
                                         (op_ == Token::SUB) ||
                                         (op_ == Token::MUL) ||
                                         (op_ == Token::DIV))) {
    CpuFeatures::Scope use_sse2(SSE2);
    Label call_runtime, after_alloc_failure;
    Label left_smi, right_smi, load_right, do_op;
    __ test(left_, Immediate(kSmiTagMask));
    __ j(zero, &left_smi);
    __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
           Factory::heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
    if (mode_ == OVERWRITE_LEFT) {
      __ mov(dst_, left_);
    }
    __ jmp(&load_right);

    __ bind(&left_smi);
    __ SmiUntag(left_);
    __ cvtsi2sd(xmm0, Operand(left_));
    __ SmiTag(left_);
    if (mode_ == OVERWRITE_LEFT) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&load_right);
    __ test(right_, Immediate(kSmiTagMask));
    __ j(zero, &right_smi);
    __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
           Factory::heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
    if (mode_ == OVERWRITE_RIGHT) {
      __ mov(dst_, right_);
    } else if (mode_ == NO_OVERWRITE) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }
    __ jmp(&do_op);

    __ bind(&right_smi);
    __ SmiUntag(right_);
    __ cvtsi2sd(xmm1, Operand(right_));
    __ SmiTag(right_);
    if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
      Label alloc_failure;
      __ push(left_);
      __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
      __ pop(left_);
    }

    __ bind(&do_op);
    switch (op_) {
      case Token::ADD: __ addsd(xmm0, xmm1); break;
      case Token::SUB: __ subsd(xmm0, xmm1); break;
      case Token::MUL: __ mulsd(xmm0, xmm1); break;
      case Token::DIV: __ divsd(xmm0, xmm1); break;
      default: UNREACHABLE();
    }
    __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
    __ jmp(&done);

    __ bind(&after_alloc_failure);
    __ pop(left_);
    __ bind(&call_runtime);
  }
  GenericBinaryOpStub stub(op_, mode_, NO_SMI_CODE_IN_STUB);
  stub.GenerateCall(masm_, left_, right_);
  if (!dst_.is(eax)) __ mov(dst_, eax);
  __ bind(&done);
}


void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           StaticType* type,
                                           OverwriteMode overwrite_mode) {
  Comment cmnt(masm_, "[ BinaryOperation");
  Comment cmnt_token(masm_, Token::String(op));

  if (op == Token::COMMA) {
    // Simply discard left value.
    frame_->Nip(1);
    return;
  }

  Result right = frame_->Pop();
  Result left = frame_->Pop();

  if (op == Token::ADD) {
    bool left_is_string = left.is_constant() && left.handle()->IsString();
    bool right_is_string = right.is_constant() && right.handle()->IsString();
    if (left_is_string || right_is_string) {
      frame_->Push(&left);
      frame_->Push(&right);
      Result answer;
      if (left_is_string) {
        if (right_is_string) {
          // TODO(lrn): if both are constant strings
          // -- do a compile time cons, if allocation during codegen is allowed.
          answer = frame_->CallRuntime(Runtime::kStringAdd, 2);
        } else {
          answer =
            frame_->InvokeBuiltin(Builtins::STRING_ADD_LEFT, CALL_FUNCTION, 2);
        }
      } else if (right_is_string) {
        answer =
          frame_->InvokeBuiltin(Builtins::STRING_ADD_RIGHT, CALL_FUNCTION, 2);
      }
      frame_->Push(&answer);
      return;
    }
    // Neither operand is known to be a string.
  }

  bool left_is_smi = left.is_constant() && left.handle()->IsSmi();
  bool left_is_non_smi = left.is_constant() && !left.handle()->IsSmi();
  bool right_is_smi = right.is_constant() && right.handle()->IsSmi();
  bool right_is_non_smi = right.is_constant() && !right.handle()->IsSmi();

  if (left_is_smi && right_is_smi) {
    // Compute the constant result at compile time, and leave it on the frame.
    int left_int = Smi::cast(*left.handle())->value();
    int right_int = Smi::cast(*right.handle())->value();
    if (FoldConstantSmis(op, left_int, right_int)) return;
  }

  Result answer;
  if (left_is_non_smi || right_is_non_smi) {
    // Go straight to the slow case, with no smi code.
    GenericBinaryOpStub stub(op, overwrite_mode, NO_SMI_CODE_IN_STUB);
    answer = stub.GenerateCall(masm_, frame_, &left, &right);
  } else if (right_is_smi) {
    answer = ConstantSmiBinaryOperation(op, &left, right.handle(),
                                        type, false, overwrite_mode);
  } else if (left_is_smi) {
    answer = ConstantSmiBinaryOperation(op, &right, left.handle(),
                                        type, true, overwrite_mode);
  } else {
    // Set the flags based on the operation, type and loop nesting level.
    // Bit operations always assume they likely operate on Smis. Still only
    // generate the inline Smi check code if this operation is part of a loop.
    // For all other operations only inline the Smi check code for likely smis
    // if the operation is part of a loop.
    if (loop_nesting() > 0 && (Token::IsBitOp(op) || type->IsLikelySmi())) {
      answer = LikelySmiBinaryOperation(op, &left, &right, overwrite_mode);
    } else {
      GenericBinaryOpStub stub(op, overwrite_mode, NO_GENERIC_BINARY_FLAGS);
      answer = stub.GenerateCall(masm_, frame_, &left, &right);
    }
  }
  frame_->Push(&answer);
}


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
  Object* answer_object = Heap::undefined_value();
  switch (op) {
    case Token::ADD:
      if (Smi::IsValid(left + right)) {
        answer_object = Smi::FromInt(left + right);
      }
      break;
    case Token::SUB:
      if (Smi::IsValid(left - right)) {
        answer_object = Smi::FromInt(left - right);
      }
      break;
    case Token::MUL: {
        double answer = static_cast<double>(left) * right;
        if (answer >= Smi::kMinValue && answer <= Smi::kMaxValue) {
          // If the product is zero and the non-zero factor is negative,
          // the spec requires us to return floating point negative zero.
          if (answer != 0 || (left >= 0 && right >= 0)) {
            answer_object = Smi::FromInt(static_cast<int>(answer));
          }
        }
      }
      break;
    case Token::DIV:
    case Token::MOD:
      break;
    case Token::BIT_OR:
      answer_object = Smi::FromInt(left | right);
      break;
    case Token::BIT_AND:
      answer_object = Smi::FromInt(left & right);
      break;
    case Token::BIT_XOR:
      answer_object = Smi::FromInt(left ^ right);
      break;

    case Token::SHL: {
        int shift_amount = right & 0x1F;
        if (Smi::IsValid(left << shift_amount)) {
          answer_object = Smi::FromInt(left << shift_amount);
        }
        break;
      }
    case Token::SHR: {
        int shift_amount = right & 0x1F;
        unsigned int unsigned_left = left;
        unsigned_left >>= shift_amount;
        if (unsigned_left <= static_cast<unsigned int>(Smi::kMaxValue)) {
          answer_object = Smi::FromInt(unsigned_left);
        }
        break;
      }
    case Token::SAR: {
        int shift_amount = right & 0x1F;
        unsigned int unsigned_left = left;
        if (left < 0) {
          // Perform arithmetic shift of a negative number by
          // complementing number, logical shifting, complementing again.
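          // Worked example (editorial): for left == -5 and shift_amount == 1,
          // ~(-5) is 4, 4 >> 1 is 2, and ~2 is -3, matching the arithmetic
          // result of -5 >> 1 without relying on implementation-defined
          // behaviour of '>>' on negative signed values.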
          unsigned_left = ~unsigned_left;
          unsigned_left >>= shift_amount;
          unsigned_left = ~unsigned_left;
        } else {
          unsigned_left >>= shift_amount;
        }
        ASSERT(Smi::IsValid(unsigned_left));  // Converted to signed.
        answer_object = Smi::FromInt(unsigned_left);  // Converted to signed.
        break;
      }
    default:
      UNREACHABLE();
      break;
  }
  if (answer_object == Heap::undefined_value()) {
    return false;
  }
  frame_->Push(Handle<Object>(answer_object));
  return true;
}


// Implements a binary operation using a deferred code object and some
// inline code to operate on smis quickly.
Result CodeGenerator::LikelySmiBinaryOperation(Token::Value op,
                                               Result* left,
                                               Result* right,
                                               OverwriteMode overwrite_mode) {
  Result answer;
  // Special handling of div and mod because they use fixed registers.
  if (op == Token::DIV || op == Token::MOD) {
    // We need eax as the quotient register, edx as the remainder
    // register, neither left nor right in eax or edx, and left copied
    // to eax.
    Result quotient;
    Result remainder;
    bool left_is_in_eax = false;
    // Step 1: get eax for quotient.
    if ((left->is_register() && left->reg().is(eax)) ||
        (right->is_register() && right->reg().is(eax))) {
      // One or both is in eax.  Use a fresh non-edx register for
      // them.
      Result fresh = allocator_->Allocate();
      ASSERT(fresh.is_valid());
      if (fresh.reg().is(edx)) {
        remainder = fresh;
        fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
      }
      if (left->is_register() && left->reg().is(eax)) {
        quotient = *left;
        *left = fresh;
        left_is_in_eax = true;
      }
      if (right->is_register() && right->reg().is(eax)) {
        quotient = *right;
        *right = fresh;
      }
      __ mov(fresh.reg(), eax);
    } else {
      // Neither left nor right is in eax.
      quotient = allocator_->Allocate(eax);
    }
    ASSERT(quotient.is_register() && quotient.reg().is(eax));
    ASSERT(!(left->is_register() && left->reg().is(eax)));
    ASSERT(!(right->is_register() && right->reg().is(eax)));

    // Step 2: get edx for remainder if necessary.
    if (!remainder.is_valid()) {
      if ((left->is_register() && left->reg().is(edx)) ||
          (right->is_register() && right->reg().is(edx))) {
        Result fresh = allocator_->Allocate();
        ASSERT(fresh.is_valid());
        if (left->is_register() && left->reg().is(edx)) {
          remainder = *left;
          *left = fresh;
        }
        if (right->is_register() && right->reg().is(edx)) {
          remainder = *right;
          *right = fresh;
        }
        __ mov(fresh.reg(), edx);
      } else {
        // Neither left nor right is in edx.
        remainder = allocator_->Allocate(edx);
      }
    }
    ASSERT(remainder.is_register() && remainder.reg().is(edx));
    ASSERT(!(left->is_register() && left->reg().is(edx)));
    ASSERT(!(right->is_register() && right->reg().is(edx)));

    left->ToRegister();
    right->ToRegister();
    frame_->Spill(eax);
    frame_->Spill(edx);

    // Check that left and right are smi tagged.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          (op == Token::DIV) ? eax : edx,
                                          left->reg(),
                                          right->reg(),
                                          overwrite_mode);
    if (left->reg().is(right->reg())) {
      __ test(left->reg(), Immediate(kSmiTagMask));
    } else {
      // Use the quotient register as a scratch for the tag check.
      if (!left_is_in_eax) __ mov(eax, left->reg());
      left_is_in_eax = false;  // About to destroy the value in eax.
      __ or_(eax, Operand(right->reg()));
      ASSERT(kSmiTag == 0);  // Adjust test if not the case.
      __ test(eax, Immediate(kSmiTagMask));
    }
    deferred->Branch(not_zero);

    if (!left_is_in_eax) __ mov(eax, left->reg());
    // Sign extend eax into edx:eax.
    __ cdq();
    // Check for 0 divisor.
    __ test(right->reg(), Operand(right->reg()));
    deferred->Branch(zero);
    // Divide edx:eax by the right operand.
    __ idiv(right->reg());

    // Complete the operation.
    if (op == Token::DIV) {
      // Check for negative zero result.  If result is zero, and divisor
      // is negative, return a floating point negative zero.  The
      // virtual frame is unchanged in this block, so local control flow
      // can use a Label rather than a JumpTarget.
      Label non_zero_result;
      __ test(left->reg(), Operand(left->reg()));
      __ j(not_zero, &non_zero_result);
      __ test(right->reg(), Operand(right->reg()));
      deferred->Branch(negative);
      __ bind(&non_zero_result);
      // Check for the corner case of dividing the most negative smi by
      // -1. We cannot use the overflow flag, since it is not set by
      // idiv instruction.
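      // Editorial note: with 31-bit smis the most negative value is
      // -0x40000000; dividing it by -1 gives +0x40000000, one past the
      // largest smi (0x3FFFFFFF), and that untagged quotient in eax is
      // exactly what the comparison below detects.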
      ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
      __ cmp(eax, 0x40000000);
      deferred->Branch(equal);
      // Check that the remainder is zero.
      __ test(edx, Operand(edx));
      deferred->Branch(not_zero);
      // Tag the result and store it in the quotient register.
      __ SmiTag(eax);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = quotient;
    } else {
      ASSERT(op == Token::MOD);
      // Check for a negative zero result.  If the result is zero, and
      // the dividend is negative, return a floating point negative
      // zero.  The frame is unchanged in this block, so local control
      // flow can use a Label rather than a JumpTarget.
      Label non_zero_result;
      __ test(edx, Operand(edx));
      __ j(not_zero, &non_zero_result, taken);
      __ test(left->reg(), Operand(left->reg()));
      deferred->Branch(negative);
      __ bind(&non_zero_result);
      deferred->BindExit();
      left->Unuse();
      right->Unuse();
      answer = remainder;
    }
    ASSERT(answer.is_valid());
    return answer;
  }

  // Special handling of shift operations because they use fixed
  // registers.
  if (op == Token::SHL || op == Token::SHR || op == Token::SAR) {
    // Move left out of ecx if necessary.
    if (left->is_register() && left->reg().is(ecx)) {
      *left = allocator_->Allocate();
      ASSERT(left->is_valid());
      __ mov(left->reg(), ecx);
    }
    right->ToRegister(ecx);
    left->ToRegister();
    ASSERT(left->is_register() && !left->reg().is(ecx));
    ASSERT(right->is_register() && right->reg().is(ecx));

    // We will modify right, it must be spilled.
    frame_->Spill(ecx);

    // Use a fresh answer register to avoid spilling the left operand.
    answer = allocator_->Allocate();
    ASSERT(answer.is_valid());
    // Check that both operands are smis using the answer register as a
    // temporary.
    DeferredInlineBinaryOperation* deferred =
        new DeferredInlineBinaryOperation(op,
                                          answer.reg(),
                                          left->reg(),
                                          ecx,
                                          overwrite_mode);
    __ mov(answer.reg(), left->reg());
    __ or_(answer.reg(), Operand(ecx));
    __ test(answer.reg(), Immediate(kSmiTagMask));
    deferred->Branch(not_zero);

    // Untag both operands.
    __ mov(answer.reg(), left->reg());
    __ SmiUntag(answer.reg());
    __ SmiUntag(ecx);
    // Perform the operation.
    switch (op) {
      case Token::SAR:
        __ sar_cl(answer.reg());
        // No checks of result necessary
        break;
      case Token::SHR: {
        Label result_ok;
        __ shr_cl(answer.reg());
        // Check that the *unsigned* result fits in a smi.  Neither of
        // the two high-order bits can be set:
        // * 0x80000000: high bit would be lost when smi tagging.
        // * 0x40000000: this number would convert to negative when smi
        //   tagging.
        // These two cases can only happen with shifts by 0 or 1 when
        // handed a valid smi.  If the answer cannot be represented by a
        // smi, restore the left and right arguments, and jump to slow
        // case.  The low bit of the left argument may be lost, but only
        // in a case where it is dropped anyway.
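        // Illustration (editorial): smi-tagging shifts left by one, so an
        // unsigned result of 0x40000000 would tag to 0x80000000 and read back
        // as a negative value, while 0x80000000 would lose its top bit;
        // testing against 0xc0000000 rejects exactly these two patterns.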
        __ test(answer.reg(), Immediate(0xc0000000));
        __ j(zero, &result_ok);
        __ SmiTag(ecx);
        deferred->Jump();
        __ bind(&result_ok);
        break;
      }
      case Token::SHL: {
        Label result_ok;
        __ shl_cl(answer.reg());
        // Check that the *signed* result fits in a smi.
        __ cmp(answer.reg(), 0xc0000000);
        __ j(positive, &result_ok);
        __ SmiTag(ecx);
        deferred->Jump();
        __ bind(&result_ok);
        break;
      }
      default:
        UNREACHABLE();
    }
    // Smi-tag the result in answer.
    __ SmiTag(answer.reg());
    deferred->BindExit();
    left->Unuse();
    right->Unuse();
    ASSERT(answer.is_valid());
    return answer;
  }

  // Handle the other binary operations.
  left->ToRegister();
  right->ToRegister();
  // A newly allocated register answer is used to hold the answer.  The
  // registers containing left and right are not modified so they don't
  // need to be spilled in the fast case.
  answer = allocator_->Allocate();
  ASSERT(answer.is_valid());

  // Perform the smi tag check.
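  // (Editorial note: since kSmiTag == 0, OR-ing the two operands and testing
  // the low bit checks both at once; the combined word has its low bit set
  // iff at least one operand is a tagged heap object pointer.)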
1341 DeferredInlineBinaryOperation* deferred =
1342 new DeferredInlineBinaryOperation(op,
1343 answer.reg(),
1344 left->reg(),
1345 right->reg(),
1346 overwrite_mode);
1347 if (left->reg().is(right->reg())) {
1348 __ test(left->reg(), Immediate(kSmiTagMask));
1349 } else {
1350 __ mov(answer.reg(), left->reg());
1351 __ or_(answer.reg(), Operand(right->reg()));
1352 ASSERT(kSmiTag == 0); // Adjust test if not the case.
1353 __ test(answer.reg(), Immediate(kSmiTagMask));
1354 }
1355 deferred->Branch(not_zero);
1356 __ mov(answer.reg(), left->reg());
1357 switch (op) {
1358 case Token::ADD:
Leon Clarked91b9f72010-01-27 17:25:45 +00001359 __ add(answer.reg(), Operand(right->reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001360 deferred->Branch(overflow);
1361 break;
1362
1363 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00001364 __ sub(answer.reg(), Operand(right->reg()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001365 deferred->Branch(overflow);
1366 break;
1367
1368 case Token::MUL: {
1369 // If the smi tag is 0 we can just leave the tag on one operand.
1370 ASSERT(kSmiTag == 0); // Adjust code below if not the case.
1371 // Remove smi tag from the left operand (but keep sign).
1372 // Left-hand operand has been copied into answer.
Leon Clarkee46be812010-01-19 14:06:41 +00001373 __ SmiUntag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001374 // Do multiplication of smis, leaving result in answer.
1375 __ imul(answer.reg(), Operand(right->reg()));
1376 // Go slow on overflows.
1377 deferred->Branch(overflow);
1378 // Check for negative zero result. If product is zero, and one
1379 // argument is negative, go to slow case. The frame is unchanged
1380 // in this block, so local control flow can use a Label rather
1381 // than a JumpTarget.
1382 Label non_zero_result;
1383 __ test(answer.reg(), Operand(answer.reg()));
1384 __ j(not_zero, &non_zero_result, taken);
1385 __ mov(answer.reg(), left->reg());
1386 __ or_(answer.reg(), Operand(right->reg()));
1387 deferred->Branch(negative);
1388 __ xor_(answer.reg(), Operand(answer.reg())); // Positive 0 is correct.
1389 __ bind(&non_zero_result);
1390 break;
1391 }
1392
1393 case Token::BIT_OR:
1394 __ or_(answer.reg(), Operand(right->reg()));
1395 break;
1396
1397 case Token::BIT_AND:
1398 __ and_(answer.reg(), Operand(right->reg()));
1399 break;
1400
1401 case Token::BIT_XOR:
1402 __ xor_(answer.reg(), Operand(right->reg()));
1403 break;
1404
1405 default:
1406 UNREACHABLE();
1407 break;
1408 }
1409 deferred->BindExit();
1410 left->Unuse();
1411 right->Unuse();
Leon Clarked91b9f72010-01-27 17:25:45 +00001412 ASSERT(answer.is_valid());
1413 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001414}
1415
1416
1417// Call the appropriate binary operation stub to compute src op value
1418// and leave the result in dst.
1419class DeferredInlineSmiOperation: public DeferredCode {
1420 public:
1421 DeferredInlineSmiOperation(Token::Value op,
1422 Register dst,
1423 Register src,
1424 Smi* value,
1425 OverwriteMode overwrite_mode)
1426 : op_(op),
1427 dst_(dst),
1428 src_(src),
1429 value_(value),
1430 overwrite_mode_(overwrite_mode) {
1431 set_comment("[ DeferredInlineSmiOperation");
1432 }
1433
1434 virtual void Generate();
1435
1436 private:
1437 Token::Value op_;
1438 Register dst_;
1439 Register src_;
1440 Smi* value_;
1441 OverwriteMode overwrite_mode_;
1442};
1443
1444
1445void DeferredInlineSmiOperation::Generate() {
Steve Blocka7e24c12009-10-30 11:49:00 +00001446 // For mod we don't generate all the Smi code inline.
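 // The stub therefore keeps its smi path for MOD, while for the other
 // operators the inline code has already handled smis and the stub can
 // omit that path (NO_SMI_CODE_IN_STUB).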
1447 GenericBinaryOpStub stub(
1448 op_,
1449 overwrite_mode_,
Steve Block3ce2e202009-11-05 08:53:23 +00001450 (op_ == Token::MOD) ? NO_GENERIC_BINARY_FLAGS : NO_SMI_CODE_IN_STUB);
1451 stub.GenerateCall(masm_, src_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001452 if (!dst_.is(eax)) __ mov(dst_, eax);
1453}
1454
1455
1456// Call the appropriate binary operation stub to compute value op src
1457// and leave the result in dst.
1458class DeferredInlineSmiOperationReversed: public DeferredCode {
1459 public:
1460 DeferredInlineSmiOperationReversed(Token::Value op,
1461 Register dst,
1462 Smi* value,
1463 Register src,
1464 OverwriteMode overwrite_mode)
1465 : op_(op),
1466 dst_(dst),
1467 value_(value),
1468 src_(src),
1469 overwrite_mode_(overwrite_mode) {
1470 set_comment("[ DeferredInlineSmiOperationReversed");
1471 }
1472
1473 virtual void Generate();
1474
1475 private:
1476 Token::Value op_;
1477 Register dst_;
1478 Smi* value_;
1479 Register src_;
1480 OverwriteMode overwrite_mode_;
1481};
1482
1483
1484void DeferredInlineSmiOperationReversed::Generate() {
Steve Block3ce2e202009-11-05 08:53:23 +00001485 GenericBinaryOpStub igostub(op_, overwrite_mode_, NO_SMI_CODE_IN_STUB);
1486 igostub.GenerateCall(masm_, value_, src_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001487 if (!dst_.is(eax)) __ mov(dst_, eax);
1488}
1489
1490
1491// The result of src + value is in dst. It either overflowed or was not
1492// smi tagged. Undo the speculative addition and call the appropriate
1493// specialized stub for add. The result is left in dst.
1494class DeferredInlineSmiAdd: public DeferredCode {
1495 public:
1496 DeferredInlineSmiAdd(Register dst,
1497 Smi* value,
1498 OverwriteMode overwrite_mode)
1499 : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
1500 set_comment("[ DeferredInlineSmiAdd");
1501 }
1502
1503 virtual void Generate();
1504
1505 private:
1506 Register dst_;
1507 Smi* value_;
1508 OverwriteMode overwrite_mode_;
1509};
1510
1511
1512void DeferredInlineSmiAdd::Generate() {
1513 // Undo the optimistic add operation and call the shared stub.
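  // Two's complement wraparound makes the subtraction exact even when the
  // optimistic add overflowed, so the stub sees the original left operand.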
1514 __ sub(Operand(dst_), Immediate(value_));
Steve Block3ce2e202009-11-05 08:53:23 +00001515 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
1516 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001517 if (!dst_.is(eax)) __ mov(dst_, eax);
1518}
1519
1520
1521// The result of value + src is in dst. It either overflowed or was not
1522// smi tagged. Undo the speculative addition and call the appropriate
1523// specialized stub for add. The result is left in dst.
1524class DeferredInlineSmiAddReversed: public DeferredCode {
1525 public:
1526 DeferredInlineSmiAddReversed(Register dst,
1527 Smi* value,
1528 OverwriteMode overwrite_mode)
1529 : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
1530 set_comment("[ DeferredInlineSmiAddReversed");
1531 }
1532
1533 virtual void Generate();
1534
1535 private:
1536 Register dst_;
1537 Smi* value_;
1538 OverwriteMode overwrite_mode_;
1539};
1540
1541
1542void DeferredInlineSmiAddReversed::Generate() {
1543 // Undo the optimistic add operation and call the shared stub.
1544 __ sub(Operand(dst_), Immediate(value_));
Steve Block3ce2e202009-11-05 08:53:23 +00001545 GenericBinaryOpStub igostub(Token::ADD, overwrite_mode_, NO_SMI_CODE_IN_STUB);
1546 igostub.GenerateCall(masm_, value_, dst_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001547 if (!dst_.is(eax)) __ mov(dst_, eax);
1548}
1549
1550
1551// The result of src - value is in dst. It either overflowed or was not
1552// smi tagged. Undo the speculative subtraction and call the
1553// appropriate specialized stub for subtract. The result is left in
1554// dst.
1555class DeferredInlineSmiSub: public DeferredCode {
1556 public:
1557 DeferredInlineSmiSub(Register dst,
1558 Smi* value,
1559 OverwriteMode overwrite_mode)
1560 : dst_(dst), value_(value), overwrite_mode_(overwrite_mode) {
1561 set_comment("[ DeferredInlineSmiSub");
1562 }
1563
1564 virtual void Generate();
1565
1566 private:
1567 Register dst_;
1568 Smi* value_;
1569 OverwriteMode overwrite_mode_;
1570};
1571
1572
1573void DeferredInlineSmiSub::Generate() {
1574 // Undo the optimistic sub operation and call the shared stub.
1575 __ add(Operand(dst_), Immediate(value_));
Steve Block3ce2e202009-11-05 08:53:23 +00001576 GenericBinaryOpStub igostub(Token::SUB, overwrite_mode_, NO_SMI_CODE_IN_STUB);
1577 igostub.GenerateCall(masm_, dst_, value_);
Steve Blocka7e24c12009-10-30 11:49:00 +00001578 if (!dst_.is(eax)) __ mov(dst_, eax);
1579}
1580
1581
Leon Clarked91b9f72010-01-27 17:25:45 +00001582Result CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
1583 Result* operand,
1584 Handle<Object> value,
1585 StaticType* type,
1586 bool reversed,
1587 OverwriteMode overwrite_mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001588 // NOTE: This is an attempt to inline (a bit) more of the code for
1589 // some possible smi operations (like + and -) when (at least) one
1590 // of the operands is a constant smi.
1591 // Consumes the argument "operand".
Steve Blocka7e24c12009-10-30 11:49:00 +00001592 // TODO(199): Optimize some special cases of operations involving a
1593 // smi literal (multiply by 2, shift by 0, etc.).
1594 if (IsUnsafeSmi(value)) {
1595 Result unsafe_operand(value);
1596 if (reversed) {
Leon Clarked91b9f72010-01-27 17:25:45 +00001597 return LikelySmiBinaryOperation(op, &unsafe_operand, operand,
1598 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001599 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00001600 return LikelySmiBinaryOperation(op, operand, &unsafe_operand,
1601 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001602 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001603 }
1604
1605 // Get the literal value.
1606 Smi* smi_value = Smi::cast(*value);
1607 int int_value = smi_value->value();
1608
Leon Clarked91b9f72010-01-27 17:25:45 +00001609 Result answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001610 switch (op) {
1611 case Token::ADD: {
1612 operand->ToRegister();
1613 frame_->Spill(operand->reg());
1614
1615 // Optimistically add. Call the specialized add stub if the
1616 // result is not a smi or overflows.
1617 DeferredCode* deferred = NULL;
1618 if (reversed) {
1619 deferred = new DeferredInlineSmiAddReversed(operand->reg(),
1620 smi_value,
1621 overwrite_mode);
1622 } else {
1623 deferred = new DeferredInlineSmiAdd(operand->reg(),
1624 smi_value,
1625 overwrite_mode);
1626 }
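      // Adding two tagged smis yields the tagged sum directly
      // (2*a + 2*b == 2*(a + b)).  Overflow or a non-smi operand is only
      // detected afterwards; the deferred code undoes the addition before
      // calling the stub.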
1627 __ add(Operand(operand->reg()), Immediate(value));
1628 deferred->Branch(overflow);
1629 __ test(operand->reg(), Immediate(kSmiTagMask));
1630 deferred->Branch(not_zero);
1631 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00001632 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00001633 break;
1634 }
1635
1636 case Token::SUB: {
1637 DeferredCode* deferred = NULL;
Steve Blocka7e24c12009-10-30 11:49:00 +00001638 if (reversed) {
1639 // The reversed case is only hit when the right operand is not a
1640 // constant.
1641 ASSERT(operand->is_register());
1642 answer = allocator()->Allocate();
1643 ASSERT(answer.is_valid());
1644 __ Set(answer.reg(), Immediate(value));
1645 deferred = new DeferredInlineSmiOperationReversed(op,
1646 answer.reg(),
1647 smi_value,
1648 operand->reg(),
1649 overwrite_mode);
1650 __ sub(answer.reg(), Operand(operand->reg()));
1651 } else {
1652 operand->ToRegister();
1653 frame_->Spill(operand->reg());
1654 answer = *operand;
1655 deferred = new DeferredInlineSmiSub(operand->reg(),
1656 smi_value,
1657 overwrite_mode);
1658 __ sub(Operand(operand->reg()), Immediate(value));
1659 }
1660 deferred->Branch(overflow);
1661 __ test(answer.reg(), Immediate(kSmiTagMask));
1662 deferred->Branch(not_zero);
1663 deferred->BindExit();
1664 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00001665 break;
1666 }
1667
1668 case Token::SAR:
1669 if (reversed) {
1670 Result constant_operand(value);
Leon Clarked91b9f72010-01-27 17:25:45 +00001671 answer = LikelySmiBinaryOperation(op, &constant_operand, operand,
1672 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001673 } else {
1674 // Only the least significant 5 bits of the shift value are used.
1675 // In the slow case, this masking is done inside the runtime call.
1676 int shift_value = int_value & 0x1f;
1677 operand->ToRegister();
1678 frame_->Spill(operand->reg());
1679 DeferredInlineSmiOperation* deferred =
1680 new DeferredInlineSmiOperation(op,
1681 operand->reg(),
1682 operand->reg(),
1683 smi_value,
1684 overwrite_mode);
1685 __ test(operand->reg(), Immediate(kSmiTagMask));
1686 deferred->Branch(not_zero);
1687 if (shift_value > 0) {
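          // The value is still tagged, so the arithmetic shift moves payload
          // bits into the tag position; clearing the tag bit afterwards
          // leaves a correctly tagged (and correctly rounded) smi result.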
1688 __ sar(operand->reg(), shift_value);
1689 __ and_(operand->reg(), ~kSmiTagMask);
1690 }
1691 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00001692 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00001693 }
1694 break;
1695
1696 case Token::SHR:
1697 if (reversed) {
1698 Result constant_operand(value);
Leon Clarked91b9f72010-01-27 17:25:45 +00001699 answer = LikelySmiBinaryOperation(op, &constant_operand, operand,
1700 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001701 } else {
1702 // Only the least significant 5 bits of the shift value are used.
1703 // In the slow case, this masking is done inside the runtime call.
1704 int shift_value = int_value & 0x1f;
1705 operand->ToRegister();
Leon Clarked91b9f72010-01-27 17:25:45 +00001706 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00001707 ASSERT(answer.is_valid());
1708 DeferredInlineSmiOperation* deferred =
1709 new DeferredInlineSmiOperation(op,
1710 answer.reg(),
1711 operand->reg(),
1712 smi_value,
1713 overwrite_mode);
1714 __ test(operand->reg(), Immediate(kSmiTagMask));
1715 deferred->Branch(not_zero);
1716 __ mov(answer.reg(), operand->reg());
Leon Clarkee46be812010-01-19 14:06:41 +00001717 __ SmiUntag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001718 __ shr(answer.reg(), shift_value);
 1719         // A negative Smi shifted right by two or more bits is in the
              // positive Smi range, so the range check below is only needed
              // for shift values of 0 or 1.
1720 if (shift_value < 2) {
1721 __ test(answer.reg(), Immediate(0xc0000000));
1722 deferred->Branch(not_zero);
1723 }
1724 operand->Unuse();
Leon Clarkee46be812010-01-19 14:06:41 +00001725 __ SmiTag(answer.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00001726 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00001727 }
1728 break;
1729
1730 case Token::SHL:
1731 if (reversed) {
Leon Clarkee46be812010-01-19 14:06:41 +00001732 Result right;
1733 Result right_copy_in_ecx;
1734
1735 // Make sure to get a copy of the right operand into ecx. This
1736 // allows us to modify it without having to restore it in the
1737 // deferred code.
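        // (An ia32 shift by a variable amount takes its count in cl, which
        // is why the count has to end up in ecx.)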
1738 operand->ToRegister();
1739 if (operand->reg().is(ecx)) {
1740 right = allocator()->Allocate();
1741 __ mov(right.reg(), ecx);
1742 frame_->Spill(ecx);
1743 right_copy_in_ecx = *operand;
1744 } else {
1745 right_copy_in_ecx = allocator()->Allocate(ecx);
1746 __ mov(ecx, operand->reg());
1747 right = *operand;
1748 }
1749 operand->Unuse();
1750
Leon Clarked91b9f72010-01-27 17:25:45 +00001751 answer = allocator()->Allocate();
Leon Clarkee46be812010-01-19 14:06:41 +00001752 DeferredInlineSmiOperationReversed* deferred =
1753 new DeferredInlineSmiOperationReversed(op,
1754 answer.reg(),
1755 smi_value,
1756 right.reg(),
1757 overwrite_mode);
1758 __ mov(answer.reg(), Immediate(int_value));
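        // Shifting the count right by the tag size both untags it and moves
        // the tag bit into the carry flag, so a set carry means the count
        // was not a smi.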
1759 __ sar(ecx, kSmiTagSize);
1760 deferred->Branch(carry);
1761 __ shl_cl(answer.reg());
1762 __ cmp(answer.reg(), 0xc0000000);
1763 deferred->Branch(sign);
1764 __ SmiTag(answer.reg());
1765
1766 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00001767 } else {
1768 // Only the least significant 5 bits of the shift value are used.
1769 // In the slow case, this masking is done inside the runtime call.
1770 int shift_value = int_value & 0x1f;
1771 operand->ToRegister();
1772 if (shift_value == 0) {
1773 // Spill operand so it can be overwritten in the slow case.
1774 frame_->Spill(operand->reg());
1775 DeferredInlineSmiOperation* deferred =
1776 new DeferredInlineSmiOperation(op,
1777 operand->reg(),
1778 operand->reg(),
1779 smi_value,
1780 overwrite_mode);
1781 __ test(operand->reg(), Immediate(kSmiTagMask));
1782 deferred->Branch(not_zero);
1783 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00001784 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00001785 } else {
1786 // Use a fresh temporary for nonzero shift values.
Leon Clarked91b9f72010-01-27 17:25:45 +00001787 answer = allocator()->Allocate();
Steve Blocka7e24c12009-10-30 11:49:00 +00001788 ASSERT(answer.is_valid());
1789 DeferredInlineSmiOperation* deferred =
1790 new DeferredInlineSmiOperation(op,
1791 answer.reg(),
1792 operand->reg(),
1793 smi_value,
1794 overwrite_mode);
1795 __ test(operand->reg(), Immediate(kSmiTagMask));
1796 deferred->Branch(not_zero);
1797 __ mov(answer.reg(), operand->reg());
1798 ASSERT(kSmiTag == 0); // adjust code if not the case
1799 // We do no shifts, only the Smi conversion, if shift_value is 1.
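          // Shifting the tagged value 2*x left by n-1 leaves the untagged
          // result x << n in the register; the add-to-self below then
          // re-tags it and sets the overflow flag exactly when the result
          // is outside the smi range.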
1800 if (shift_value > 1) {
1801 __ shl(answer.reg(), shift_value - 1);
1802 }
1803 // Convert int result to Smi, checking that it is in int range.
1804 ASSERT(kSmiTagSize == 1); // adjust code if not the case
1805 __ add(answer.reg(), Operand(answer.reg()));
1806 deferred->Branch(overflow);
1807 deferred->BindExit();
1808 operand->Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00001809 }
1810 }
1811 break;
1812
1813 case Token::BIT_OR:
1814 case Token::BIT_XOR:
1815 case Token::BIT_AND: {
1816 operand->ToRegister();
1817 frame_->Spill(operand->reg());
1818 DeferredCode* deferred = NULL;
1819 if (reversed) {
1820 deferred = new DeferredInlineSmiOperationReversed(op,
1821 operand->reg(),
1822 smi_value,
1823 operand->reg(),
1824 overwrite_mode);
1825 } else {
1826 deferred = new DeferredInlineSmiOperation(op,
1827 operand->reg(),
1828 operand->reg(),
1829 smi_value,
1830 overwrite_mode);
1831 }
1832 __ test(operand->reg(), Immediate(kSmiTagMask));
1833 deferred->Branch(not_zero);
1834 if (op == Token::BIT_AND) {
1835 __ and_(Operand(operand->reg()), Immediate(value));
1836 } else if (op == Token::BIT_XOR) {
1837 if (int_value != 0) {
1838 __ xor_(Operand(operand->reg()), Immediate(value));
1839 }
1840 } else {
1841 ASSERT(op == Token::BIT_OR);
1842 if (int_value != 0) {
1843 __ or_(Operand(operand->reg()), Immediate(value));
1844 }
1845 }
1846 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00001847 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00001848 break;
1849 }
1850
1851 // Generate inline code for mod of powers of 2 and negative powers of 2.
1852 case Token::MOD:
1853 if (!reversed &&
1854 int_value != 0 &&
1855 (IsPowerOf2(int_value) || IsPowerOf2(-int_value))) {
1856 operand->ToRegister();
1857 frame_->Spill(operand->reg());
1858 DeferredCode* deferred = new DeferredInlineSmiOperation(op,
1859 operand->reg(),
1860 operand->reg(),
1861 smi_value,
1862 overwrite_mode);
1863 // Check for negative or non-Smi left hand side.
1864 __ test(operand->reg(), Immediate(kSmiTagMask | 0x80000000));
1865 deferred->Branch(not_zero);
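        // For a power-of-two modulus m, x % m == x & (m - 1) when x >= 0,
        // which was just checked.  The mask (m << kSmiTagSize) - 1 applies
        // this to the tagged value, and |m| is used because the sign of the
        // divisor does not affect the result of % in JavaScript.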
1866 if (int_value < 0) int_value = -int_value;
1867 if (int_value == 1) {
1868 __ mov(operand->reg(), Immediate(Smi::FromInt(0)));
1869 } else {
1870 __ and_(operand->reg(), (int_value << kSmiTagSize) - 1);
1871 }
1872 deferred->BindExit();
Leon Clarked91b9f72010-01-27 17:25:45 +00001873 answer = *operand;
Steve Blocka7e24c12009-10-30 11:49:00 +00001874 break;
1875 }
1876 // Fall through if we did not find a power of 2 on the right hand side!
1877
1878 default: {
1879 Result constant_operand(value);
1880 if (reversed) {
Leon Clarked91b9f72010-01-27 17:25:45 +00001881 answer = LikelySmiBinaryOperation(op, &constant_operand, operand,
1882 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001883 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00001884 answer = LikelySmiBinaryOperation(op, operand, &constant_operand,
1885 overwrite_mode);
Steve Blocka7e24c12009-10-30 11:49:00 +00001886 }
1887 break;
1888 }
1889 }
Leon Clarked91b9f72010-01-27 17:25:45 +00001890 ASSERT(answer.is_valid());
1891 return answer;
Steve Blocka7e24c12009-10-30 11:49:00 +00001892}
1893
1894
Leon Clarkee46be812010-01-19 14:06:41 +00001895static bool CouldBeNaN(const Result& result) {
1896 if (!result.is_constant()) return true;
1897 if (!result.handle()->IsHeapNumber()) return false;
1898 return isnan(HeapNumber::cast(*result.handle())->value());
1899}
1900
1901
1902void CodeGenerator::Comparison(AstNode* node,
1903 Condition cc,
Steve Blocka7e24c12009-10-30 11:49:00 +00001904 bool strict,
1905 ControlDestination* dest) {
1906 // Strict only makes sense for equality comparisons.
1907 ASSERT(!strict || cc == equal);
1908
1909 Result left_side;
1910 Result right_side;
1911 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
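  // For example, a > b is compiled as b < a: the operands are popped in
  // swapped order and the condition reversed, so only less, equal and
  // greater_equal need to be handled below.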
1912 if (cc == greater || cc == less_equal) {
1913 cc = ReverseCondition(cc);
1914 left_side = frame_->Pop();
1915 right_side = frame_->Pop();
1916 } else {
1917 right_side = frame_->Pop();
1918 left_side = frame_->Pop();
1919 }
1920 ASSERT(cc == less || cc == equal || cc == greater_equal);
1921
Leon Clarkee46be812010-01-19 14:06:41 +00001922 // If either side is a constant of some sort, we can probably optimize the
1923 // comparison.
1924 bool left_side_constant_smi = false;
1925 bool left_side_constant_null = false;
1926 bool left_side_constant_1_char_string = false;
1927 if (left_side.is_constant()) {
1928 left_side_constant_smi = left_side.handle()->IsSmi();
1929 left_side_constant_null = left_side.handle()->IsNull();
1930 left_side_constant_1_char_string =
1931 (left_side.handle()->IsString() &&
1932 (String::cast(*left_side.handle())->length() == 1));
1933 }
1934 bool right_side_constant_smi = false;
1935 bool right_side_constant_null = false;
1936 bool right_side_constant_1_char_string = false;
1937 if (right_side.is_constant()) {
1938 right_side_constant_smi = right_side.handle()->IsSmi();
1939 right_side_constant_null = right_side.handle()->IsNull();
1940 right_side_constant_1_char_string =
1941 (right_side.handle()->IsString() &&
1942 (String::cast(*right_side.handle())->length() == 1));
1943 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001944
1945 if (left_side_constant_smi || right_side_constant_smi) {
1946 if (left_side_constant_smi && right_side_constant_smi) {
1947 // Trivial case, comparing two constants.
1948 int left_value = Smi::cast(*left_side.handle())->value();
1949 int right_value = Smi::cast(*right_side.handle())->value();
1950 switch (cc) {
1951 case less:
1952 dest->Goto(left_value < right_value);
1953 break;
1954 case equal:
1955 dest->Goto(left_value == right_value);
1956 break;
1957 case greater_equal:
1958 dest->Goto(left_value >= right_value);
1959 break;
1960 default:
1961 UNREACHABLE();
1962 }
Leon Clarkee46be812010-01-19 14:06:41 +00001963 } else {
1964 // Only one side is a constant Smi.
Steve Blocka7e24c12009-10-30 11:49:00 +00001965 // If left side is a constant Smi, reverse the operands.
1966 // Since one side is a constant Smi, conversion order does not matter.
1967 if (left_side_constant_smi) {
1968 Result temp = left_side;
1969 left_side = right_side;
1970 right_side = temp;
1971 cc = ReverseCondition(cc);
1972 // This may reintroduce greater or less_equal as the value of cc.
1973 // CompareStub and the inline code both support all values of cc.
1974 }
1975 // Implement comparison against a constant Smi, inlining the case
1976 // where both sides are Smis.
1977 left_side.ToRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00001978 Register left_reg = left_side.reg();
1979 Handle<Object> right_val = right_side.handle();
Steve Blocka7e24c12009-10-30 11:49:00 +00001980
1981 // Here we split control flow to the stub call and inlined cases
1982 // before finally splitting it to the control destination. We use
1983 // a jump target and branching to duplicate the virtual frame at
1984 // the first split. We manually handle the off-frame references
1985 // by reconstituting them on the non-fall-through path.
1986 JumpTarget is_smi;
Steve Blocka7e24c12009-10-30 11:49:00 +00001987 __ test(left_side.reg(), Immediate(kSmiTagMask));
1988 is_smi.Branch(zero, taken);
1989
Leon Clarkee46be812010-01-19 14:06:41 +00001990 bool is_for_loop_compare = (node->AsCompareOperation() != NULL)
1991 && node->AsCompareOperation()->is_for_loop_condition();
1992 if (!is_for_loop_compare
1993 && CpuFeatures::IsSupported(SSE2)
1994 && right_val->IsSmi()) {
1995 // Right side is a constant smi and left side has been checked
1996 // not to be a smi.
1997 CpuFeatures::Scope use_sse2(SSE2);
1998 JumpTarget not_number;
1999 __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
2000 Immediate(Factory::heap_number_map()));
2001 not_number.Branch(not_equal, &left_side);
2002 __ movdbl(xmm1,
2003 FieldOperand(left_reg, HeapNumber::kValueOffset));
2004 int value = Smi::cast(*right_val)->value();
2005 if (value == 0) {
2006 __ xorpd(xmm0, xmm0);
2007 } else {
2008 Result temp = allocator()->Allocate();
2009 __ mov(temp.reg(), Immediate(value));
2010 __ cvtsi2sd(xmm0, Operand(temp.reg()));
2011 temp.Unuse();
2012 }
2013 __ comisd(xmm1, xmm0);
2014 // Jump to builtin for NaN.
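          // (comisd leaves the parity flag set for an unordered result,
          // i.e. when either operand is NaN.)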
2015 not_number.Branch(parity_even, &left_side);
2016 left_side.Unuse();
2017 Condition double_cc = cc;
2018 switch (cc) {
2019 case less: double_cc = below; break;
2020 case equal: double_cc = equal; break;
2021 case less_equal: double_cc = below_equal; break;
2022 case greater: double_cc = above; break;
2023 case greater_equal: double_cc = above_equal; break;
2024 default: UNREACHABLE();
2025 }
2026 dest->true_target()->Branch(double_cc);
2027 dest->false_target()->Jump();
2028 not_number.Bind(&left_side);
2029 }
2030
Steve Blocka7e24c12009-10-30 11:49:00 +00002031 // Setup and call the compare stub.
Leon Clarkee46be812010-01-19 14:06:41 +00002032 CompareStub stub(cc, strict, kCantBothBeNaN);
Steve Blocka7e24c12009-10-30 11:49:00 +00002033 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2034 result.ToRegister();
2035 __ cmp(result.reg(), 0);
2036 result.Unuse();
2037 dest->true_target()->Branch(cc);
2038 dest->false_target()->Jump();
2039
2040 is_smi.Bind();
2041 left_side = Result(left_reg);
2042 right_side = Result(right_val);
2043 // Test smi equality and comparison by signed int comparison.
2044 if (IsUnsafeSmi(right_side.handle())) {
2045 right_side.ToRegister();
2046 __ cmp(left_side.reg(), Operand(right_side.reg()));
2047 } else {
2048 __ cmp(Operand(left_side.reg()), Immediate(right_side.handle()));
2049 }
2050 left_side.Unuse();
2051 right_side.Unuse();
2052 dest->Split(cc);
2053 }
Leon Clarkee46be812010-01-19 14:06:41 +00002054
Steve Blocka7e24c12009-10-30 11:49:00 +00002055 } else if (cc == equal &&
2056 (left_side_constant_null || right_side_constant_null)) {
2057 // To make null checks efficient, we check if either the left side or
2058 // the right side is the constant 'null'.
2059 // If so, we optimize the code by inlining a null check instead of
2060 // calling the (very) general runtime routine for checking equality.
2061 Result operand = left_side_constant_null ? right_side : left_side;
2062 right_side.Unuse();
2063 left_side.Unuse();
2064 operand.ToRegister();
2065 __ cmp(operand.reg(), Factory::null_value());
2066 if (strict) {
2067 operand.Unuse();
2068 dest->Split(equal);
2069 } else {
2070 // The 'null' value is only equal to 'undefined' if using non-strict
2071 // comparisons.
2072 dest->true_target()->Branch(equal);
2073 __ cmp(operand.reg(), Factory::undefined_value());
2074 dest->true_target()->Branch(equal);
2075 __ test(operand.reg(), Immediate(kSmiTagMask));
2076 dest->false_target()->Branch(equal);
2077
2078 // It can be an undetectable object.
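      // Objects whose map has the undetectable bit set (e.g. document.all
      // in browsers) are specified to compare equal to both null and
      // undefined in non-strict comparisons.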
2079 // Use a scratch register in preference to spilling operand.reg().
2080 Result temp = allocator()->Allocate();
2081 ASSERT(temp.is_valid());
2082 __ mov(temp.reg(),
2083 FieldOperand(operand.reg(), HeapObject::kMapOffset));
2084 __ movzx_b(temp.reg(),
2085 FieldOperand(temp.reg(), Map::kBitFieldOffset));
2086 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
2087 temp.Unuse();
2088 operand.Unuse();
2089 dest->Split(not_zero);
2090 }
Leon Clarkee46be812010-01-19 14:06:41 +00002091 } else if (left_side_constant_1_char_string ||
2092 right_side_constant_1_char_string) {
2093 if (left_side_constant_1_char_string && right_side_constant_1_char_string) {
2094 // Trivial case, comparing two constants.
2095 int left_value = String::cast(*left_side.handle())->Get(0);
2096 int right_value = String::cast(*right_side.handle())->Get(0);
2097 switch (cc) {
2098 case less:
2099 dest->Goto(left_value < right_value);
2100 break;
2101 case equal:
2102 dest->Goto(left_value == right_value);
2103 break;
2104 case greater_equal:
2105 dest->Goto(left_value >= right_value);
2106 break;
2107 default:
2108 UNREACHABLE();
2109 }
2110 } else {
2111 // Only one side is a constant 1 character string.
2112 // If left side is a constant 1-character string, reverse the operands.
2113 // Since one side is a constant string, conversion order does not matter.
2114 if (left_side_constant_1_char_string) {
2115 Result temp = left_side;
2116 left_side = right_side;
2117 right_side = temp;
2118 cc = ReverseCondition(cc);
2119 // This may reintroduce greater or less_equal as the value of cc.
2120 // CompareStub and the inline code both support all values of cc.
2121 }
2122 // Implement comparison against a constant string, inlining the case
2123 // where both sides are strings.
2124 left_side.ToRegister();
2125
2126 // Here we split control flow to the stub call and inlined cases
2127 // before finally splitting it to the control destination. We use
2128 // a jump target and branching to duplicate the virtual frame at
2129 // the first split. We manually handle the off-frame references
2130 // by reconstituting them on the non-fall-through path.
2131 JumpTarget is_not_string, is_string;
2132 Register left_reg = left_side.reg();
2133 Handle<Object> right_val = right_side.handle();
2134 __ test(left_side.reg(), Immediate(kSmiTagMask));
2135 is_not_string.Branch(zero, &left_side);
2136 Result temp = allocator_->Allocate();
2137 ASSERT(temp.is_valid());
2138 __ mov(temp.reg(),
2139 FieldOperand(left_side.reg(), HeapObject::kMapOffset));
2140 __ movzx_b(temp.reg(),
2141 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
2142 // If we are testing for equality then make use of the symbol shortcut.
 2143       // Check if the left hand side has the same type as the right hand
 2144       // side (which is always a symbol).
2145 if (cc == equal) {
2146 Label not_a_symbol;
2147 ASSERT(kSymbolTag != 0);
2148 // Ensure that no non-strings have the symbol bit set.
2149 ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
2150 __ test(temp.reg(), Immediate(kIsSymbolMask)); // Test the symbol bit.
2151 __ j(zero, &not_a_symbol);
2152 // They are symbols, so do identity compare.
2153 __ cmp(left_side.reg(), right_side.handle());
2154 dest->true_target()->Branch(equal);
2155 dest->false_target()->Branch(not_equal);
2156 __ bind(&not_a_symbol);
2157 }
 2158       // If the left operand is not a string of the type we handle, call
            // the stub.
2159 __ and_(temp.reg(),
2160 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2161 __ cmp(temp.reg(), kStringTag | kSeqStringTag | kAsciiStringTag);
2162 temp.Unuse();
2163 is_string.Branch(equal, &left_side);
2164
2165 // Setup and call the compare stub.
2166 is_not_string.Bind(&left_side);
2167 CompareStub stub(cc, strict, kCantBothBeNaN);
2168 Result result = frame_->CallStub(&stub, &left_side, &right_side);
2169 result.ToRegister();
2170 __ cmp(result.reg(), 0);
2171 result.Unuse();
2172 dest->true_target()->Branch(cc);
2173 dest->false_target()->Jump();
2174
2175 is_string.Bind(&left_side);
2176 // Here we know we have a sequential ASCII string.
2177 left_side = Result(left_reg);
2178 right_side = Result(right_val);
2179 Result temp2 = allocator_->Allocate();
2180 ASSERT(temp2.is_valid());
2181 // Test string equality and comparison.
2182 if (cc == equal) {
2183 Label comparison_done;
2184 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
2185 Immediate(1));
2186 __ j(not_equal, &comparison_done);
2187 uint8_t char_value =
2188 static_cast<uint8_t>(String::cast(*right_side.handle())->Get(0));
2189 __ cmpb(FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize),
2190 char_value);
2191 __ bind(&comparison_done);
2192 } else {
2193 __ mov(temp2.reg(),
2194 FieldOperand(left_side.reg(), String::kLengthOffset));
2195 __ sub(Operand(temp2.reg()), Immediate(1));
2196 Label comparison;
2197 // If the length is 0 then our subtraction gave -1 which compares less
2198 // than any character.
2199 __ j(negative, &comparison);
2200 // Otherwise load the first character.
2201 __ movzx_b(temp2.reg(),
2202 FieldOperand(left_side.reg(), SeqAsciiString::kHeaderSize));
2203 __ bind(&comparison);
 2204         // Compare the first character of the string with our constant
2205 // 1-character string.
2206 uint8_t char_value =
2207 static_cast<uint8_t>(String::cast(*right_side.handle())->Get(0));
2208 __ cmp(Operand(temp2.reg()), Immediate(char_value));
2209 Label characters_were_different;
2210 __ j(not_equal, &characters_were_different);
2211 // If the first character is the same then the long string sorts after
2212 // the short one.
2213 __ cmp(FieldOperand(left_side.reg(), String::kLengthOffset),
2214 Immediate(1));
2215 __ bind(&characters_were_different);
2216 }
2217 temp2.Unuse();
2218 left_side.Unuse();
2219 right_side.Unuse();
2220 dest->Split(cc);
2221 }
2222 } else {
 2223   // Neither side is a constant Smi, constant null, or constant
        // 1-character string.
Steve Blocka7e24c12009-10-30 11:49:00 +00002224 // If either side is a non-smi constant, skip the smi check.
2225 bool known_non_smi =
2226 (left_side.is_constant() && !left_side.handle()->IsSmi()) ||
2227 (right_side.is_constant() && !right_side.handle()->IsSmi());
Leon Clarkee46be812010-01-19 14:06:41 +00002228 NaNInformation nan_info =
2229 (CouldBeNaN(left_side) && CouldBeNaN(right_side)) ?
2230 kBothCouldBeNaN :
2231 kCantBothBeNaN;
Steve Blocka7e24c12009-10-30 11:49:00 +00002232 left_side.ToRegister();
2233 right_side.ToRegister();
2234
2235 if (known_non_smi) {
2236 // When non-smi, call out to the compare stub.
Leon Clarkee46be812010-01-19 14:06:41 +00002237 CompareStub stub(cc, strict, nan_info);
Steve Blocka7e24c12009-10-30 11:49:00 +00002238 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2239 if (cc == equal) {
2240 __ test(answer.reg(), Operand(answer.reg()));
2241 } else {
2242 __ cmp(answer.reg(), 0);
2243 }
2244 answer.Unuse();
2245 dest->Split(cc);
2246 } else {
2247 // Here we split control flow to the stub call and inlined cases
2248 // before finally splitting it to the control destination. We use
2249 // a jump target and branching to duplicate the virtual frame at
2250 // the first split. We manually handle the off-frame references
2251 // by reconstituting them on the non-fall-through path.
2252 JumpTarget is_smi;
2253 Register left_reg = left_side.reg();
2254 Register right_reg = right_side.reg();
2255
2256 Result temp = allocator_->Allocate();
2257 ASSERT(temp.is_valid());
2258 __ mov(temp.reg(), left_side.reg());
2259 __ or_(temp.reg(), Operand(right_side.reg()));
2260 __ test(temp.reg(), Immediate(kSmiTagMask));
2261 temp.Unuse();
2262 is_smi.Branch(zero, taken);
2263 // When non-smi, call out to the compare stub.
Leon Clarkee46be812010-01-19 14:06:41 +00002264 CompareStub stub(cc, strict, nan_info);
Steve Blocka7e24c12009-10-30 11:49:00 +00002265 Result answer = frame_->CallStub(&stub, &left_side, &right_side);
2266 if (cc == equal) {
2267 __ test(answer.reg(), Operand(answer.reg()));
2268 } else {
2269 __ cmp(answer.reg(), 0);
2270 }
2271 answer.Unuse();
2272 dest->true_target()->Branch(cc);
2273 dest->false_target()->Jump();
2274
2275 is_smi.Bind();
2276 left_side = Result(left_reg);
2277 right_side = Result(right_reg);
2278 __ cmp(left_side.reg(), Operand(right_side.reg()));
2279 right_side.Unuse();
2280 left_side.Unuse();
2281 dest->Split(cc);
2282 }
2283 }
2284}
2285
2286
Steve Blocka7e24c12009-10-30 11:49:00 +00002287// Call the function just below TOS on the stack with the given
2288// arguments. The receiver is the TOS.
2289void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00002290 CallFunctionFlags flags,
Steve Blocka7e24c12009-10-30 11:49:00 +00002291 int position) {
2292 // Push the arguments ("left-to-right") on the stack.
2293 int arg_count = args->length();
2294 for (int i = 0; i < arg_count; i++) {
2295 Load(args->at(i));
2296 }
2297
2298 // Record the position for debugging purposes.
2299 CodeForSourcePosition(position);
2300
2301 // Use the shared code stub to call the function.
2302 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00002303 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00002304 Result answer = frame_->CallStub(&call_function, arg_count + 1);
2305 // Restore context and replace function on the stack with the
2306 // result of the stub invocation.
2307 frame_->RestoreContextRegister();
2308 frame_->SetElementAt(0, &answer);
2309}
2310
2311
Leon Clarked91b9f72010-01-27 17:25:45 +00002312void CodeGenerator::CallApplyLazy(Expression* applicand,
Steve Blocka7e24c12009-10-30 11:49:00 +00002313 Expression* receiver,
2314 VariableProxy* arguments,
2315 int position) {
Leon Clarked91b9f72010-01-27 17:25:45 +00002316 // An optimized implementation of expressions of the form
2317 // x.apply(y, arguments).
2318 // If the arguments object of the scope has not been allocated,
2319 // and x.apply is Function.prototype.apply, this optimization
2320 // just copies y and the arguments of the current function on the
2321 // stack, as receiver and arguments, and calls x.
2322 // In the implementation comments, we call x the applicand
2323 // and y the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00002324 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
2325 ASSERT(arguments->IsArguments());
2326
Leon Clarked91b9f72010-01-27 17:25:45 +00002327 // Load applicand.apply onto the stack. This will usually
Steve Blocka7e24c12009-10-30 11:49:00 +00002328 // give us a megamorphic load site. Not super, but it works.
Leon Clarked91b9f72010-01-27 17:25:45 +00002329 Load(applicand);
2330 Handle<String> name = Factory::LookupAsciiSymbol("apply");
2331 frame()->Push(name);
2332 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
2333 __ nop();
2334 frame()->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00002335
2336 // Load the receiver and the existing arguments object onto the
2337 // expression stack. Avoid allocating the arguments object here.
2338 Load(receiver);
Andrei Popescu31002712010-02-23 13:46:05 +00002339 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00002340
2341 // Emit the source position information after having loaded the
2342 // receiver and the arguments.
2343 CodeForSourcePosition(position);
Leon Clarked91b9f72010-01-27 17:25:45 +00002344 // Contents of frame at this point:
2345 // Frame[0]: arguments object of the current function or the hole.
2346 // Frame[1]: receiver
2347 // Frame[2]: applicand.apply
2348 // Frame[3]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00002349
2350 // Check if the arguments object has been lazily allocated
2351 // already. If so, just use that instead of copying the arguments
2352 // from the stack. This also deals with cases where a local variable
2353 // named 'arguments' has been introduced.
2354 frame_->Dup();
2355 Result probe = frame_->Pop();
Leon Clarked91b9f72010-01-27 17:25:45 +00002356 { VirtualFrame::SpilledScope spilled_scope;
2357 Label slow, done;
2358 bool try_lazy = true;
2359 if (probe.is_constant()) {
2360 try_lazy = probe.handle()->IsTheHole();
2361 } else {
2362 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
2363 probe.Unuse();
2364 __ j(not_equal, &slow);
2365 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002366
Leon Clarked91b9f72010-01-27 17:25:45 +00002367 if (try_lazy) {
2368 Label build_args;
2369 // Get rid of the arguments object probe.
2370 frame_->Drop(); // Can be called on a spilled frame.
2371 // Stack now has 3 elements on it.
2372 // Contents of stack at this point:
2373 // esp[0]: receiver
2374 // esp[1]: applicand.apply
2375 // esp[2]: applicand.
Steve Blocka7e24c12009-10-30 11:49:00 +00002376
Leon Clarked91b9f72010-01-27 17:25:45 +00002377 // Check that the receiver really is a JavaScript object.
2378 __ mov(eax, Operand(esp, 0));
2379 __ test(eax, Immediate(kSmiTagMask));
2380 __ j(zero, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00002381 // We allow all JSObjects including JSFunctions. As long as
2382 // JS_FUNCTION_TYPE is the last instance type and it is right
2383 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
2384 // bound.
2385 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2386 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Leon Clarked91b9f72010-01-27 17:25:45 +00002387 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
2388 __ j(below, &build_args);
Steve Blocka7e24c12009-10-30 11:49:00 +00002389
Leon Clarked91b9f72010-01-27 17:25:45 +00002390 // Check that applicand.apply is Function.prototype.apply.
2391 __ mov(eax, Operand(esp, kPointerSize));
2392 __ test(eax, Immediate(kSmiTagMask));
2393 __ j(zero, &build_args);
2394 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
2395 __ j(not_equal, &build_args);
2396 __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002397 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Leon Clarked91b9f72010-01-27 17:25:45 +00002398 __ cmp(FieldOperand(ecx, SharedFunctionInfo::kCodeOffset),
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002399 Immediate(apply_code));
Leon Clarked91b9f72010-01-27 17:25:45 +00002400 __ j(not_equal, &build_args);
2401
2402 // Check that applicand is a function.
2403 __ mov(edi, Operand(esp, 2 * kPointerSize));
2404 __ test(edi, Immediate(kSmiTagMask));
2405 __ j(zero, &build_args);
2406 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2407 __ j(not_equal, &build_args);
2408
2409 // Copy the arguments to this function possibly from the
2410 // adaptor frame below it.
2411 Label invoke, adapted;
2412 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2413 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
2414 __ cmp(Operand(ecx),
2415 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2416 __ j(equal, &adapted);
2417
2418 // No arguments adaptor frame. Copy fixed number of arguments.
Andrei Popescu31002712010-02-23 13:46:05 +00002419 __ mov(eax, Immediate(scope()->num_parameters()));
2420 for (int i = 0; i < scope()->num_parameters(); i++) {
Leon Clarked91b9f72010-01-27 17:25:45 +00002421 __ push(frame_->ParameterAt(i));
2422 }
2423 __ jmp(&invoke);
2424
2425 // Arguments adaptor frame present. Copy arguments from there, but
2426 // avoid copying too many arguments to avoid stack overflows.
2427 __ bind(&adapted);
2428 static const uint32_t kArgumentsLimit = 1 * KB;
2429 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2430 __ SmiUntag(eax);
2431 __ mov(ecx, Operand(eax));
2432 __ cmp(eax, kArgumentsLimit);
2433 __ j(above, &build_args);
2434
2435 // Loop through the arguments pushing them onto the execution
2436 // stack. We don't inform the virtual frame of the push, so we don't
2437 // have to worry about getting rid of the elements from the virtual
2438 // frame.
2439 Label loop;
2440 // ecx is a small non-negative integer, due to the test above.
2441 __ test(ecx, Operand(ecx));
2442 __ j(zero, &invoke);
2443 __ bind(&loop);
2444 __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
2445 __ dec(ecx);
2446 __ j(not_zero, &loop);
2447
2448 // Invoke the function.
2449 __ bind(&invoke);
2450 ParameterCount actual(eax);
2451 __ InvokeFunction(edi, actual, CALL_FUNCTION);
2452 // Drop applicand.apply and applicand from the stack, and push
2453 // the result of the function call, but leave the spilled frame
2454 // unchanged, with 3 elements, so it is correct when we compile the
2455 // slow-case code.
2456 __ add(Operand(esp), Immediate(2 * kPointerSize));
2457 __ push(eax);
2458 // Stack now has 1 element:
2459 // esp[0]: result
2460 __ jmp(&done);
2461
2462 // Slow-case: Allocate the arguments object since we know it isn't
 2463       // there, and fall through to the slow case where we call
2464 // applicand.apply.
2465 __ bind(&build_args);
 2466       // Stack now has 3 elements, because we jumped here from points where:
2467 // esp[0]: receiver
2468 // esp[1]: applicand.apply
2469 // esp[2]: applicand.
2470
2471 // StoreArgumentsObject requires a correct frame, and may modify it.
2472 Result arguments_object = StoreArgumentsObject(false);
2473 frame_->SpillAll();
2474 arguments_object.ToRegister();
2475 frame_->EmitPush(arguments_object.reg());
2476 arguments_object.Unuse();
2477 // Stack and frame now have 4 elements.
2478 __ bind(&slow);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002479 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002480
Leon Clarked91b9f72010-01-27 17:25:45 +00002481 // Generic computation of x.apply(y, args) with no special optimization.
2482 // Flip applicand.apply and applicand on the stack, so
2483 // applicand looks like the receiver of the applicand.apply call.
2484 // Then process it as a normal function call.
2485 __ mov(eax, Operand(esp, 3 * kPointerSize));
2486 __ mov(ebx, Operand(esp, 2 * kPointerSize));
2487 __ mov(Operand(esp, 2 * kPointerSize), eax);
2488 __ mov(Operand(esp, 3 * kPointerSize), ebx);
Leon Clarkeeab96aa2010-01-27 16:31:12 +00002489
Leon Clarked91b9f72010-01-27 17:25:45 +00002490 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
2491 Result res = frame_->CallStub(&call_function, 3);
2492 // The function and its two arguments have been dropped.
2493 frame_->Drop(1); // Drop the receiver as well.
2494 res.ToRegister();
2495 frame_->EmitPush(res.reg());
2496 // Stack now has 1 element:
2497 // esp[0]: result
2498 if (try_lazy) __ bind(&done);
2499 } // End of spilled scope.
2500 // Restore the context register after a call.
Steve Blocka7e24c12009-10-30 11:49:00 +00002501 frame_->RestoreContextRegister();
2502}
2503
2504
2505class DeferredStackCheck: public DeferredCode {
2506 public:
2507 DeferredStackCheck() {
2508 set_comment("[ DeferredStackCheck");
2509 }
2510
2511 virtual void Generate();
2512};
2513
2514
2515void DeferredStackCheck::Generate() {
2516 StackCheckStub stub;
2517 __ CallStub(&stub);
2518}
2519
2520
2521void CodeGenerator::CheckStack() {
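  // The fast path is a single compare of esp against the external stack
  // limit; only if esp is below the limit (including the case where the
  // limit was lowered to request an interruption) does the deferred code
  // call the stack check stub.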
Steve Blockd0582a62009-12-15 09:54:21 +00002522 DeferredStackCheck* deferred = new DeferredStackCheck;
2523 ExternalReference stack_limit =
2524 ExternalReference::address_of_stack_limit();
2525 __ cmp(esp, Operand::StaticVariable(stack_limit));
2526 deferred->Branch(below);
2527 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00002528}
2529
2530
2531void CodeGenerator::VisitAndSpill(Statement* statement) {
2532 ASSERT(in_spilled_code());
2533 set_in_spilled_code(false);
2534 Visit(statement);
2535 if (frame_ != NULL) {
2536 frame_->SpillAll();
2537 }
2538 set_in_spilled_code(true);
2539}
2540
2541
2542void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
2543 ASSERT(in_spilled_code());
2544 set_in_spilled_code(false);
2545 VisitStatements(statements);
2546 if (frame_ != NULL) {
2547 frame_->SpillAll();
2548 }
2549 set_in_spilled_code(true);
2550}
2551
2552
2553void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
2554 ASSERT(!in_spilled_code());
2555 for (int i = 0; has_valid_frame() && i < statements->length(); i++) {
2556 Visit(statements->at(i));
2557 }
2558}
2559
2560
2561void CodeGenerator::VisitBlock(Block* node) {
2562 ASSERT(!in_spilled_code());
2563 Comment cmnt(masm_, "[ Block");
2564 CodeForStatementPosition(node);
2565 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
2566 VisitStatements(node->statements());
2567 if (node->break_target()->is_linked()) {
2568 node->break_target()->Bind();
2569 }
2570 node->break_target()->Unuse();
2571}
2572
2573
2574void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
2575 // Call the runtime to declare the globals. The inevitable call
2576 // will sync frame elements to memory anyway, so we do it eagerly to
2577 // allow us to push the arguments directly into place.
2578 frame_->SyncRange(0, frame_->element_count() - 1);
2579
Steve Block3ce2e202009-11-05 08:53:23 +00002580 frame_->EmitPush(esi); // The context is the first argument.
Steve Blocka7e24c12009-10-30 11:49:00 +00002581 frame_->EmitPush(Immediate(pairs));
Steve Blocka7e24c12009-10-30 11:49:00 +00002582 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
2583 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
2584 // Return value is ignored.
2585}
2586
2587
2588void CodeGenerator::VisitDeclaration(Declaration* node) {
2589 Comment cmnt(masm_, "[ Declaration");
2590 Variable* var = node->proxy()->var();
2591 ASSERT(var != NULL); // must have been resolved
2592 Slot* slot = var->slot();
2593
2594 // If it was not possible to allocate the variable at compile time,
2595 // we need to "declare" it at runtime to make sure it actually
2596 // exists in the local context.
2597 if (slot != NULL && slot->type() == Slot::LOOKUP) {
2598 // Variables with a "LOOKUP" slot were introduced as non-locals
2599 // during variable resolution and must have mode DYNAMIC.
2600 ASSERT(var->is_dynamic());
2601 // For now, just do a runtime call. Sync the virtual frame eagerly
2602 // so we can simply push the arguments into place.
2603 frame_->SyncRange(0, frame_->element_count() - 1);
2604 frame_->EmitPush(esi);
2605 frame_->EmitPush(Immediate(var->name()));
2606 // Declaration nodes are always introduced in one of two modes.
2607 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
2608 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
2609 frame_->EmitPush(Immediate(Smi::FromInt(attr)));
2610 // Push initial value, if any.
2611 // Note: For variables we must not push an initial value (such as
2612 // 'undefined') because we may have a (legal) redeclaration and we
2613 // must not destroy the current value.
2614 if (node->mode() == Variable::CONST) {
2615 frame_->EmitPush(Immediate(Factory::the_hole_value()));
2616 } else if (node->fun() != NULL) {
2617 Load(node->fun());
2618 } else {
2619 frame_->EmitPush(Immediate(Smi::FromInt(0))); // no initial value!
2620 }
2621 Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
2622 // Ignore the return value (declarations are statements).
2623 return;
2624 }
2625
2626 ASSERT(!var->is_global());
2627
2628 // If we have a function or a constant, we need to initialize the variable.
2629 Expression* val = NULL;
2630 if (node->mode() == Variable::CONST) {
2631 val = new Literal(Factory::the_hole_value());
2632 } else {
2633 val = node->fun(); // NULL if we don't have a function
2634 }
2635
2636 if (val != NULL) {
2637 {
2638 // Set the initial value.
2639 Reference target(this, node->proxy());
2640 Load(val);
2641 target.SetValue(NOT_CONST_INIT);
2642 // The reference is removed from the stack (preserving TOS) when
2643 // it goes out of scope.
2644 }
2645 // Get rid of the assigned value (declarations are statements).
2646 frame_->Drop();
2647 }
2648}
2649
2650
2651void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
2652 ASSERT(!in_spilled_code());
2653 Comment cmnt(masm_, "[ ExpressionStatement");
2654 CodeForStatementPosition(node);
2655 Expression* expression = node->expression();
2656 expression->MarkAsStatement();
2657 Load(expression);
2658 // Remove the lingering expression result from the top of stack.
2659 frame_->Drop();
2660}
2661
2662
2663void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
2664 ASSERT(!in_spilled_code());
2665 Comment cmnt(masm_, "// EmptyStatement");
2666 CodeForStatementPosition(node);
2667 // nothing to do
2668}
2669
2670
2671void CodeGenerator::VisitIfStatement(IfStatement* node) {
2672 ASSERT(!in_spilled_code());
2673 Comment cmnt(masm_, "[ IfStatement");
2674 // Generate different code depending on which parts of the if statement
2675 // are present or not.
2676 bool has_then_stm = node->HasThenStatement();
2677 bool has_else_stm = node->HasElseStatement();
2678
2679 CodeForStatementPosition(node);
2680 JumpTarget exit;
2681 if (has_then_stm && has_else_stm) {
2682 JumpTarget then;
2683 JumpTarget else_;
2684 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00002685 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002686
2687 if (dest.false_was_fall_through()) {
2688 // The else target was bound, so we compile the else part first.
2689 Visit(node->else_statement());
2690
2691 // We may have dangling jumps to the then part.
2692 if (then.is_linked()) {
2693 if (has_valid_frame()) exit.Jump();
2694 then.Bind();
2695 Visit(node->then_statement());
2696 }
2697 } else {
2698 // The then target was bound, so we compile the then part first.
2699 Visit(node->then_statement());
2700
2701 if (else_.is_linked()) {
2702 if (has_valid_frame()) exit.Jump();
2703 else_.Bind();
2704 Visit(node->else_statement());
2705 }
2706 }
2707
2708 } else if (has_then_stm) {
2709 ASSERT(!has_else_stm);
2710 JumpTarget then;
2711 ControlDestination dest(&then, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00002712 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002713
2714 if (dest.false_was_fall_through()) {
2715 // The exit label was bound. We may have dangling jumps to the
2716 // then part.
2717 if (then.is_linked()) {
2718 exit.Unuse();
2719 exit.Jump();
2720 then.Bind();
2721 Visit(node->then_statement());
2722 }
2723 } else {
2724 // The then label was bound.
2725 Visit(node->then_statement());
2726 }
2727
2728 } else if (has_else_stm) {
2729 ASSERT(!has_then_stm);
2730 JumpTarget else_;
2731 ControlDestination dest(&exit, &else_, false);
Steve Blockd0582a62009-12-15 09:54:21 +00002732 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002733
2734 if (dest.true_was_fall_through()) {
2735 // The exit label was bound. We may have dangling jumps to the
2736 // else part.
2737 if (else_.is_linked()) {
2738 exit.Unuse();
2739 exit.Jump();
2740 else_.Bind();
2741 Visit(node->else_statement());
2742 }
2743 } else {
2744 // The else label was bound.
2745 Visit(node->else_statement());
2746 }
2747
2748 } else {
2749 ASSERT(!has_then_stm && !has_else_stm);
2750 // We only care about the condition's side effects (not its value
2751 // or control flow effect). LoadCondition is called without
2752 // forcing control flow.
2753 ControlDestination dest(&exit, &exit, true);
Steve Blockd0582a62009-12-15 09:54:21 +00002754 LoadCondition(node->condition(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00002755 if (!dest.is_used()) {
2756 // We got a value on the frame rather than (or in addition to)
2757 // control flow.
2758 frame_->Drop();
2759 }
2760 }
2761
2762 if (exit.is_linked()) {
2763 exit.Bind();
2764 }
2765}
2766
2767
2768void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
2769 ASSERT(!in_spilled_code());
2770 Comment cmnt(masm_, "[ ContinueStatement");
2771 CodeForStatementPosition(node);
2772 node->target()->continue_target()->Jump();
2773}
2774
2775
2776void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
2777 ASSERT(!in_spilled_code());
2778 Comment cmnt(masm_, "[ BreakStatement");
2779 CodeForStatementPosition(node);
2780 node->target()->break_target()->Jump();
2781}
2782
2783
2784void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
2785 ASSERT(!in_spilled_code());
2786 Comment cmnt(masm_, "[ ReturnStatement");
2787
2788 CodeForStatementPosition(node);
2789 Load(node->expression());
2790 Result return_value = frame_->Pop();
Steve Blockd0582a62009-12-15 09:54:21 +00002791 masm()->WriteRecordedPositions();
Steve Blocka7e24c12009-10-30 11:49:00 +00002792 if (function_return_is_shadowed_) {
2793 function_return_.Jump(&return_value);
2794 } else {
2795 frame_->PrepareForReturn();
2796 if (function_return_.is_bound()) {
2797 // If the function return label is already bound we reuse the
2798 // code by jumping to the return site.
2799 function_return_.Jump(&return_value);
2800 } else {
2801 function_return_.Bind(&return_value);
2802 GenerateReturnSequence(&return_value);
2803 }
2804 }
2805}
2806
2807
2808void CodeGenerator::GenerateReturnSequence(Result* return_value) {
2809 // The return value is a live (but not currently reference counted)
2810 // reference to eax. This is safe because the current frame does not
2811 // contain a reference to eax (it is prepared for the return by spilling
2812 // all registers).
2813 if (FLAG_trace) {
2814 frame_->Push(return_value);
2815 *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
2816 }
2817 return_value->ToRegister(eax);
2818
2819 // Add a label for checking the size of the code used for returning.
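  // The debugger patches the return sequence in place with a debug break
  // call, so the code emitted between this label and the end of the
  // function must have a fixed, known length.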
2820 Label check_exit_codesize;
2821 masm_->bind(&check_exit_codesize);
2822
2823 // Leave the frame and return popping the arguments and the
2824 // receiver.
2825 frame_->Exit();
Andrei Popescu31002712010-02-23 13:46:05 +00002826 masm_->ret((scope()->num_parameters() + 1) * kPointerSize);
Steve Blocka7e24c12009-10-30 11:49:00 +00002827 DeleteFrame();
2828
2829#ifdef ENABLE_DEBUGGER_SUPPORT
2830 // Check that the size of the code used for returning matches what is
2831 // expected by the debugger.
Steve Blockd0582a62009-12-15 09:54:21 +00002832 ASSERT_EQ(Assembler::kJSReturnSequenceLength,
Steve Blocka7e24c12009-10-30 11:49:00 +00002833 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
2834#endif
2835}
2836
2837
2838void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
2839 ASSERT(!in_spilled_code());
2840 Comment cmnt(masm_, "[ WithEnterStatement");
2841 CodeForStatementPosition(node);
2842 Load(node->expression());
2843 Result context;
2844 if (node->is_catch_block()) {
2845 context = frame_->CallRuntime(Runtime::kPushCatchContext, 1);
2846 } else {
2847 context = frame_->CallRuntime(Runtime::kPushContext, 1);
2848 }
2849
2850 // Update context local.
2851 frame_->SaveContextRegister();
2852
2853 // Verify that the runtime call result and esi agree.
2854 if (FLAG_debug_code) {
2855 __ cmp(context.reg(), Operand(esi));
2856 __ Assert(equal, "Runtime::NewContext should end up in esi");
2857 }
2858}
2859
2860
2861void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
2862 ASSERT(!in_spilled_code());
2863 Comment cmnt(masm_, "[ WithExitStatement");
2864 CodeForStatementPosition(node);
2865 // Pop context.
2866 __ mov(esi, ContextOperand(esi, Context::PREVIOUS_INDEX));
2867 // Update context local.
2868 frame_->SaveContextRegister();
2869}
2870
2871
2872void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
2873 ASSERT(!in_spilled_code());
2874 Comment cmnt(masm_, "[ SwitchStatement");
2875 CodeForStatementPosition(node);
2876 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
2877
2878 // Compile the switch value.
2879 Load(node->tag());
2880
2881 ZoneList<CaseClause*>* cases = node->cases();
2882 int length = cases->length();
2883 CaseClause* default_clause = NULL;
2884
2885 JumpTarget next_test;
2886 // Compile the case label expressions and comparisons. Exit early
2887 // if a comparison is unconditionally true. The target next_test is
2888 // bound before the loop in order to indicate control flow to the
2889 // first comparison.
2890 next_test.Bind();
2891 for (int i = 0; i < length && !next_test.is_unused(); i++) {
2892 CaseClause* clause = cases->at(i);
2893 // The default is not a test, but remember it for later.
2894 if (clause->is_default()) {
2895 default_clause = clause;
2896 continue;
2897 }
2898
2899 Comment cmnt(masm_, "[ Case comparison");
2900 // We recycle the same target next_test for each test. Bind it if
2901 // the previous test has not done so and then unuse it for the
2902 // loop.
2903 if (next_test.is_linked()) {
2904 next_test.Bind();
2905 }
2906 next_test.Unuse();
2907
2908 // Duplicate the switch value.
2909 frame_->Dup();
2910
2911 // Compile the label expression.
2912 Load(clause->label());
2913
2914 // Compare and branch to the body if true or the next test if
2915 // false. Prefer the next test as a fall through.
2916 ControlDestination dest(clause->body_target(), &next_test, false);
Leon Clarkee46be812010-01-19 14:06:41 +00002917 Comparison(node, equal, true, &dest);
Steve Blocka7e24c12009-10-30 11:49:00 +00002918
2919 // If the comparison fell through to the true target, jump to the
2920 // actual body.
2921 if (dest.true_was_fall_through()) {
2922 clause->body_target()->Unuse();
2923 clause->body_target()->Jump();
2924 }
2925 }
2926
2927 // If there was control flow to a next test from the last one
2928 // compiled, compile a jump to the default or break target.
2929 if (!next_test.is_unused()) {
2930 if (next_test.is_linked()) {
2931 next_test.Bind();
2932 }
2933 // Drop the switch value.
2934 frame_->Drop();
2935 if (default_clause != NULL) {
2936 default_clause->body_target()->Jump();
2937 } else {
2938 node->break_target()->Jump();
2939 }
2940 }
2941
2942
2943 // The last instruction emitted was a jump, either to the default
2944 // clause or the break target, or else to a case body from the loop
2945 // that compiles the tests.
2946 ASSERT(!has_valid_frame());
2947 // Compile case bodies as needed.
2948 for (int i = 0; i < length; i++) {
2949 CaseClause* clause = cases->at(i);
2950
2951 // There are two ways to reach the body: from the corresponding
2952 // test or as the fall through of the previous body.
2953 if (clause->body_target()->is_linked() || has_valid_frame()) {
2954 if (clause->body_target()->is_linked()) {
2955 if (has_valid_frame()) {
2956 // If we have both a jump to the test and a fall through, put
2957 // a jump on the fall through path to avoid the dropping of
2958 // the switch value on the test path. The exception is the
2959 // default which has already had the switch value dropped.
2960 if (clause->is_default()) {
2961 clause->body_target()->Bind();
2962 } else {
2963 JumpTarget body;
2964 body.Jump();
2965 clause->body_target()->Bind();
2966 frame_->Drop();
2967 body.Bind();
2968 }
2969 } else {
2970 // No fall through to worry about.
2971 clause->body_target()->Bind();
2972 if (!clause->is_default()) {
2973 frame_->Drop();
2974 }
2975 }
2976 } else {
2977 // Otherwise, we have only fall through.
2978 ASSERT(has_valid_frame());
2979 }
2980
2981 // We are now prepared to compile the body.
2982 Comment cmnt(masm_, "[ Case body");
2983 VisitStatements(clause->statements());
2984 }
2985 clause->body_target()->Unuse();
2986 }
2987
2988 // We may not have a valid frame here so bind the break target only
2989 // if needed.
2990 if (node->break_target()->is_linked()) {
2991 node->break_target()->Bind();
2992 }
2993 node->break_target()->Unuse();
2994}
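// Sketch of the strategy above for a switch such as
//
//   switch (x) { case a: s1; case b: s2; default: s3; }
//
// The switch value stays on the frame; each non-default clause Dup()s
// it, loads its label expression, and emits an 'equal' comparison that
// branches to the clause's body target or falls through to next_test.
// The default clause is skipped during the test pass and is only jumped
// to if every comparison fails; the bodies are then emitted in source
// order, so ordinary case fall-through still works.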
2995
2996
Steve Block3ce2e202009-11-05 08:53:23 +00002997void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002998 ASSERT(!in_spilled_code());
Steve Block3ce2e202009-11-05 08:53:23 +00002999 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00003000 CodeForStatementPosition(node);
3001 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
Steve Block3ce2e202009-11-05 08:53:23 +00003002 JumpTarget body(JumpTarget::BIDIRECTIONAL);
3003 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00003004
Steve Block3ce2e202009-11-05 08:53:23 +00003005 ConditionAnalysis info = AnalyzeCondition(node->cond());
3006 // Label the top of the loop for the backward jump if necessary.
3007 switch (info) {
3008 case ALWAYS_TRUE:
3009 // Use the continue target.
3010 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3011 node->continue_target()->Bind();
3012 break;
3013 case ALWAYS_FALSE:
3014 // No need to label it.
3015 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3016 break;
3017 case DONT_KNOW:
3018 // Continue is the test, so use the backward body target.
3019 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3020 body.Bind();
3021 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00003022 }
3023
Steve Block3ce2e202009-11-05 08:53:23 +00003024 CheckStack(); // TODO(1222600): ignore if body contains calls.
3025 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00003026
Steve Block3ce2e202009-11-05 08:53:23 +00003027 // Compile the test.
3028 switch (info) {
3029 case ALWAYS_TRUE:
3030 // If control flow can fall off the end of the body, jump back to
3031 // the top and bind the break target at the exit.
3032 if (has_valid_frame()) {
3033 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00003034 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003035 if (node->break_target()->is_linked()) {
3036 node->break_target()->Bind();
3037 }
3038 break;
Steve Block3ce2e202009-11-05 08:53:23 +00003039 case ALWAYS_FALSE:
3040 // We may have had continues or breaks in the body.
3041 if (node->continue_target()->is_linked()) {
3042 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003043 }
Steve Block3ce2e202009-11-05 08:53:23 +00003044 if (node->break_target()->is_linked()) {
3045 node->break_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003046 }
Steve Block3ce2e202009-11-05 08:53:23 +00003047 break;
3048 case DONT_KNOW:
3049 // We have to compile the test expression if it can be reached by
3050 // control flow falling out of the body or via continue.
3051 if (node->continue_target()->is_linked()) {
3052 node->continue_target()->Bind();
3053 }
3054 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00003055 Comment cmnt(masm_, "[ DoWhileCondition");
3056 CodeForDoWhileConditionPosition(node);
Steve Block3ce2e202009-11-05 08:53:23 +00003057 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00003058 LoadCondition(node->cond(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003059 }
Steve Block3ce2e202009-11-05 08:53:23 +00003060 if (node->break_target()->is_linked()) {
3061 node->break_target()->Bind();
3062 }
3063 break;
3064 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003065
Steve Block3ce2e202009-11-05 08:53:23 +00003066 DecrementLoopNesting();
3067}
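// Condition analysis lets the do-while code above specialize, e.g.:
//
//   do { body; } while (true);   // ALWAYS_TRUE: unconditional back jump
//   do { body; } while (false);  // ALWAYS_FALSE: body runs once, no jump
//   do { body; } while (f());    // DONT_KNOW: test emitted after the body
//
// In the DONT_KNOW case the test is the continue target, so 'continue'
// in the body jumps straight to the condition at the bottom.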
Steve Blocka7e24c12009-10-30 11:49:00 +00003068
Steve Block3ce2e202009-11-05 08:53:23 +00003069
3070void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
3071 ASSERT(!in_spilled_code());
3072 Comment cmnt(masm_, "[ WhileStatement");
3073 CodeForStatementPosition(node);
3074
3075 // If the condition is always false and has no side effects, we do not
3076 // need to compile anything.
3077 ConditionAnalysis info = AnalyzeCondition(node->cond());
3078 if (info == ALWAYS_FALSE) return;
3079
3080 // Do not duplicate conditions that may have function literal
3081 // subexpressions. This can cause us to compile the function literal
3082 // twice.
3083 bool test_at_bottom = !node->may_have_function_literal();
3084 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3085 IncrementLoopNesting();
3086 JumpTarget body;
3087 if (test_at_bottom) {
3088 body.set_direction(JumpTarget::BIDIRECTIONAL);
3089 }
3090
3091 // Based on the condition analysis, compile the test as necessary.
3092 switch (info) {
3093 case ALWAYS_TRUE:
3094 // We will not compile the test expression. Label the top of the
3095 // loop with the continue target.
3096 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3097 node->continue_target()->Bind();
3098 break;
3099 case DONT_KNOW: {
3100 if (test_at_bottom) {
3101 // Continue is the test at the bottom, no need to label the test
3102 // at the top. The body is a backward target.
3103 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3104 } else {
3105 // Label the test at the top as the continue target. The body
3106 // is a forward-only target.
3107 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3108 node->continue_target()->Bind();
3109 }
3110 // Compile the test with the body as the true target and preferred
3111 // fall-through and with the break target as the false target.
3112 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00003113 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00003114
3115 if (dest.false_was_fall_through()) {
3116 // If we got the break target as fall-through, the test may have
3117 // been unconditionally false (if there are no jumps to the
3118 // body).
3119 if (!body.is_linked()) {
3120 DecrementLoopNesting();
3121 return;
3122 }
3123
3124 // Otherwise, jump around the body on the fall through and then
3125 // bind the body target.
3126 node->break_target()->Unuse();
3127 node->break_target()->Jump();
3128 body.Bind();
3129 }
3130 break;
3131 }
3132 case ALWAYS_FALSE:
3133 UNREACHABLE();
3134 break;
3135 }
3136
3137 CheckStack(); // TODO(1222600): ignore if body contains calls.
3138 Visit(node->body());
3139
3140 // Based on the condition analysis, compile the backward jump as
3141 // necessary.
3142 switch (info) {
3143 case ALWAYS_TRUE:
3144 // The loop body has been labeled with the continue target.
3145 if (has_valid_frame()) {
3146 node->continue_target()->Jump();
3147 }
3148 break;
3149 case DONT_KNOW:
3150 if (test_at_bottom) {
3151 // If we have chosen to recompile the test at the bottom, then
3152 // it is the continue target.
Steve Blocka7e24c12009-10-30 11:49:00 +00003153 if (node->continue_target()->is_linked()) {
3154 node->continue_target()->Bind();
3155 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003156 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00003157 // The break target is the fall-through (body is a backward
3158 // jump from here and thus an invalid fall-through).
3159 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00003160 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00003161 }
3162 } else {
3163 // If we have chosen not to recompile the test at the bottom,
3164 // jump back to the one at the top.
3165 if (has_valid_frame()) {
3166 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00003167 }
3168 }
Steve Block3ce2e202009-11-05 08:53:23 +00003169 break;
3170 case ALWAYS_FALSE:
3171 UNREACHABLE();
3172 break;
3173 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003174
Steve Block3ce2e202009-11-05 08:53:23 +00003175 // The break target may be already bound (by the condition), or there
3176 // may not be a valid frame. Bind it only if needed.
3177 if (node->break_target()->is_linked()) {
3178 node->break_target()->Bind();
3179 }
3180 DecrementLoopNesting();
3181}
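// The test_at_bottom choice above rotates the loop, e.g.:
//
//   while (i < n) { body; }        // condition emitted at the top and
//                                  // again at the bottom: one branch per
//                                  // iteration
//   while (g(function () {})) {}   // condition contains a function
//                                  // literal: emitted only at the top to
//                                  // avoid compiling the literal twice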
3182
3183
3184void CodeGenerator::VisitForStatement(ForStatement* node) {
3185 ASSERT(!in_spilled_code());
3186 Comment cmnt(masm_, "[ ForStatement");
3187 CodeForStatementPosition(node);
3188
3189 // Compile the init expression if present.
3190 if (node->init() != NULL) {
3191 Visit(node->init());
3192 }
3193
3194 // If the condition is always false and has no side effects, we do not
3195 // need to compile anything else.
3196 ConditionAnalysis info = AnalyzeCondition(node->cond());
3197 if (info == ALWAYS_FALSE) return;
3198
3199 // Do not duplicate conditions that may have function literal
3200 // subexpressions. This can cause us to compile the function literal
3201 // twice.
3202 bool test_at_bottom = !node->may_have_function_literal();
3203 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3204 IncrementLoopNesting();
3205
3206 // Target for backward edge if no test at the bottom, otherwise
3207 // unused.
3208 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
3209
3210 // Target for backward edge if there is a test at the bottom,
3211 // otherwise used as target for test at the top.
3212 JumpTarget body;
3213 if (test_at_bottom) {
3214 body.set_direction(JumpTarget::BIDIRECTIONAL);
3215 }
3216
3217 // Based on the condition analysis, compile the test as necessary.
3218 switch (info) {
3219 case ALWAYS_TRUE:
3220 // We will not compile the test expression. Label the top of the
3221 // loop.
3222 if (node->next() == NULL) {
3223 // Use the continue target if there is no update expression.
3224 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3225 node->continue_target()->Bind();
3226 } else {
3227 // Otherwise use the backward loop target.
3228 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3229 loop.Bind();
3230 }
3231 break;
3232 case DONT_KNOW: {
3233 if (test_at_bottom) {
3234 // Continue is either the update expression or the test at the
3235 // bottom, no need to label the test at the top.
3236 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3237 } else if (node->next() == NULL) {
3238 // We are not recompiling the test at the bottom and there is no
3239 // update expression.
3240 node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
3241 node->continue_target()->Bind();
3242 } else {
3243 // We are not recompiling the test at the bottom and there is an
3244 // update expression.
3245 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3246 loop.Bind();
3247 }
3248 // Compile the test with the body as the true target and preferred
3249 // fall-through and with the break target as the false target.
3250 ControlDestination dest(&body, node->break_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00003251 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00003252
3253 if (dest.false_was_fall_through()) {
3254 // If we got the break target as fall-through, the test may have
3255 // been unconditionally false (if there are no jumps to the
3256 // body).
3257 if (!body.is_linked()) {
3258 DecrementLoopNesting();
3259 return;
3260 }
3261
3262 // Otherwise, jump around the body on the fall through and then
3263 // bind the body target.
3264 node->break_target()->Unuse();
3265 node->break_target()->Jump();
3266 body.Bind();
3267 }
3268 break;
3269 }
3270 case ALWAYS_FALSE:
3271 UNREACHABLE();
3272 break;
3273 }
3274
3275 CheckStack(); // TODO(1222600): ignore if body contains calls.
3276 Visit(node->body());
3277
3278 // If there is an update expression, compile it if necessary.
3279 if (node->next() != NULL) {
3280 if (node->continue_target()->is_linked()) {
3281 node->continue_target()->Bind();
3282 }
3283
3284 // Control can reach the update by falling out of the body or by a
3285 // continue.
3286 if (has_valid_frame()) {
 3287 // Record the source position of the statement, because this code,
 3288 // which comes after the code for the body, actually belongs to the
 3289 // loop statement and not the body.
3290 CodeForStatementPosition(node);
3291 Visit(node->next());
3292 }
3293 }
3294
3295 // Based on the condition analysis, compile the backward jump as
3296 // necessary.
3297 switch (info) {
3298 case ALWAYS_TRUE:
3299 if (has_valid_frame()) {
3300 if (node->next() == NULL) {
3301 node->continue_target()->Jump();
3302 } else {
3303 loop.Jump();
3304 }
3305 }
3306 break;
3307 case DONT_KNOW:
3308 if (test_at_bottom) {
3309 if (node->continue_target()->is_linked()) {
3310 // We can have dangling jumps to the continue target if there
3311 // was no update expression.
3312 node->continue_target()->Bind();
3313 }
3314 // Control can reach the test at the bottom by falling out of
3315 // the body, by a continue in the body, or from the update
3316 // expression.
3317 if (has_valid_frame()) {
3318 // The break target is the fall-through (body is a backward
3319 // jump from here).
3320 ControlDestination dest(&body, node->break_target(), false);
Steve Blockd0582a62009-12-15 09:54:21 +00003321 LoadCondition(node->cond(), &dest, true);
Steve Block3ce2e202009-11-05 08:53:23 +00003322 }
3323 } else {
3324 // Otherwise, jump back to the test at the top.
Steve Blocka7e24c12009-10-30 11:49:00 +00003325 if (has_valid_frame()) {
3326 if (node->next() == NULL) {
3327 node->continue_target()->Jump();
3328 } else {
3329 loop.Jump();
3330 }
3331 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003332 }
3333 break;
Steve Block3ce2e202009-11-05 08:53:23 +00003334 case ALWAYS_FALSE:
3335 UNREACHABLE();
3336 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00003337 }
3338
Steve Block3ce2e202009-11-05 08:53:23 +00003339 // The break target may be already bound (by the condition), or
3340 // there may not be a valid frame. Bind it only if needed.
3341 if (node->break_target()->is_linked()) {
3342 node->break_target()->Bind();
3343 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003344 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00003345}
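// For the for-statement above, the continue target depends on the shape
// of the loop. In
//
//   for (i = 0; i < n; i++) { ...; continue; }
//
// 'continue' must reach the update expression i++, so the update is
// labeled as the continue target; without an update expression the
// continue target is the loop top (or the bottom test when the
// condition is recompiled there).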
3346
3347
3348void CodeGenerator::VisitForInStatement(ForInStatement* node) {
3349 ASSERT(!in_spilled_code());
3350 VirtualFrame::SpilledScope spilled_scope;
3351 Comment cmnt(masm_, "[ ForInStatement");
3352 CodeForStatementPosition(node);
3353
3354 JumpTarget primitive;
3355 JumpTarget jsobject;
3356 JumpTarget fixed_array;
3357 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
3358 JumpTarget end_del_check;
3359 JumpTarget exit;
3360
3361 // Get the object to enumerate over (converted to JSObject).
3362 LoadAndSpill(node->enumerable());
3363
 3364 // Both SpiderMonkey and kjs ignore null and undefined, in contrast
 3365 // to the specification; ECMA-262 section 12.6.4 mandates a call to ToObject.
3366 frame_->EmitPop(eax);
3367
3368 // eax: value to be iterated over
3369 __ cmp(eax, Factory::undefined_value());
3370 exit.Branch(equal);
3371 __ cmp(eax, Factory::null_value());
3372 exit.Branch(equal);
3373
3374 // Stack layout in body:
3375 // [iteration counter (smi)] <- slot 0
3376 // [length of array] <- slot 1
3377 // [FixedArray] <- slot 2
3378 // [Map or 0] <- slot 3
3379 // [Object] <- slot 4
3380
3381 // Check if enumerable is already a JSObject
3382 // eax: value to be iterated over
3383 __ test(eax, Immediate(kSmiTagMask));
3384 primitive.Branch(zero);
3385 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3386 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
3387 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
3388 jsobject.Branch(above_equal);
3389
3390 primitive.Bind();
3391 frame_->EmitPush(eax);
3392 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION, 1);
 3393 // The function call returns the value in eax, which is where we want it below.
3394
3395 jsobject.Bind();
3396 // Get the set of properties (as a FixedArray or Map).
3397 // eax: value to be iterated over
Steve Blockd0582a62009-12-15 09:54:21 +00003398 frame_->EmitPush(eax); // Push the object being iterated over.
Steve Blocka7e24c12009-10-30 11:49:00 +00003399
Steve Blockd0582a62009-12-15 09:54:21 +00003400 // Check cache validity in generated code. This is a fast case for
3401 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
3402 // guarantee cache validity, call the runtime system to check cache
3403 // validity or get the property names in a fixed array.
3404 JumpTarget call_runtime;
3405 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
3406 JumpTarget check_prototype;
3407 JumpTarget use_cache;
3408 __ mov(ecx, eax);
3409 loop.Bind();
3410 // Check that there are no elements.
3411 __ mov(edx, FieldOperand(ecx, JSObject::kElementsOffset));
3412 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
3413 call_runtime.Branch(not_equal);
3414 // Check that instance descriptors are not empty so that we can
3415 // check for an enum cache. Leave the map in ebx for the subsequent
3416 // prototype load.
3417 __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
3418 __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
3419 __ cmp(Operand(edx), Immediate(Factory::empty_descriptor_array()));
3420 call_runtime.Branch(equal);
 3421 // Check that there is an enum cache in the non-empty instance
3422 // descriptors. This is the case if the next enumeration index
3423 // field does not contain a smi.
3424 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
3425 __ test(edx, Immediate(kSmiTagMask));
3426 call_runtime.Branch(zero);
3427 // For all objects but the receiver, check that the cache is empty.
3428 __ cmp(ecx, Operand(eax));
3429 check_prototype.Branch(equal);
3430 __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
3431 __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
3432 call_runtime.Branch(not_equal);
3433 check_prototype.Bind();
3434 // Load the prototype from the map and loop if non-null.
3435 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3436 __ cmp(Operand(ecx), Immediate(Factory::null_value()));
3437 loop.Branch(not_equal);
3438 // The enum cache is valid. Load the map of the object being
3439 // iterated over and use the cache for the iteration.
3440 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
3441 use_cache.Jump();
3442
3443 call_runtime.Bind();
3444 // Call the runtime to get the property names for the object.
Steve Blocka7e24c12009-10-30 11:49:00 +00003445 frame_->EmitPush(eax); // push the Object (slot 4) for the runtime call
3446 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
3447
Steve Blockd0582a62009-12-15 09:54:21 +00003448 // If we got a map from the runtime call, we can do a fast
3449 // modification check. Otherwise, we got a fixed array, and we have
3450 // to do a slow check.
Steve Blocka7e24c12009-10-30 11:49:00 +00003451 // eax: map or fixed array (result from call to
3452 // Runtime::kGetPropertyNamesFast)
3453 __ mov(edx, Operand(eax));
3454 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
3455 __ cmp(ecx, Factory::meta_map());
3456 fixed_array.Branch(not_equal);
3457
Steve Blockd0582a62009-12-15 09:54:21 +00003458 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00003459 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00003460 // eax: map (either the result from a call to
3461 // Runtime::kGetPropertyNamesFast or has been fetched directly from
3462 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00003463 __ mov(ecx, Operand(eax));
Steve Blockd0582a62009-12-15 09:54:21 +00003464
Steve Blocka7e24c12009-10-30 11:49:00 +00003465 __ mov(ecx, FieldOperand(ecx, Map::kInstanceDescriptorsOffset));
3466 // Get the bridge array held in the enumeration index field.
3467 __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
3468 // Get the cache from the bridge array.
3469 __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
3470
3471 frame_->EmitPush(eax); // <- slot 3
3472 frame_->EmitPush(edx); // <- slot 2
3473 __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00003474 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00003475 frame_->EmitPush(eax); // <- slot 1
3476 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
3477 entry.Jump();
3478
3479 fixed_array.Bind();
3480 // eax: fixed array (result from call to Runtime::kGetPropertyNamesFast)
3481 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 3
3482 frame_->EmitPush(eax); // <- slot 2
3483
3484 // Push the length of the array and the initial index onto the stack.
3485 __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00003486 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00003487 frame_->EmitPush(eax); // <- slot 1
3488 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0
3489
3490 // Condition.
3491 entry.Bind();
3492 // Grab the current frame's height for the break and continue
3493 // targets only after all the state is pushed on the frame.
3494 node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
3495 node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
3496
3497 __ mov(eax, frame_->ElementAt(0)); // load the current count
3498 __ cmp(eax, frame_->ElementAt(1)); // compare to the array length
3499 node->break_target()->Branch(above_equal);
3500
3501 // Get the i'th entry of the array.
3502 __ mov(edx, frame_->ElementAt(2));
3503 __ mov(ebx, Operand(edx, eax, times_2,
3504 FixedArray::kHeaderSize - kHeapObjectTag));
3505
 3506 // Get the expected map from the stack or a zero map in the
 3507 // permanent slow case.
 3508 // eax: current iteration count; ebx: i'th entry of the enum cache
3509 __ mov(edx, frame_->ElementAt(3));
3510 // Check if the expected map still matches that of the enumerable.
3511 // If not, we have to filter the key.
3512 // eax: current iteration count
3513 // ebx: i'th entry of the enum cache
3514 // edx: expected map value
3515 __ mov(ecx, frame_->ElementAt(4));
3516 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3517 __ cmp(ecx, Operand(edx));
3518 end_del_check.Branch(equal);
3519
3520 // Convert the entry to a string (or null if it isn't a property anymore).
3521 frame_->EmitPush(frame_->ElementAt(4)); // push enumerable
3522 frame_->EmitPush(ebx); // push entry
3523 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
3524 __ mov(ebx, Operand(eax));
3525
3526 // If the property has been removed while iterating, we just skip it.
3527 __ cmp(ebx, Factory::null_value());
3528 node->continue_target()->Branch(equal);
3529
3530 end_del_check.Bind();
3531 // Store the entry in the 'each' expression and take another spin in the
 3532 // loop. ebx: i'th entry of the enum cache (or the string thereof)
3533 frame_->EmitPush(ebx);
3534 { Reference each(this, node->each());
3535 // Loading a reference may leave the frame in an unspilled state.
3536 frame_->SpillAll();
3537 if (!each.is_illegal()) {
3538 if (each.size() > 0) {
3539 frame_->EmitPush(frame_->ElementAt(each.size()));
Leon Clarked91b9f72010-01-27 17:25:45 +00003540 each.SetValue(NOT_CONST_INIT);
3541 frame_->Drop(2);
3542 } else {
3543 // If the reference was to a slot we rely on the convenient property
3544 // that it doesn't matter whether a value (eg, ebx pushed above) is
3545 // right on top of or right underneath a zero-sized reference.
3546 each.SetValue(NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00003547 frame_->Drop();
3548 }
3549 }
3550 }
3551 // Unloading a reference may leave the frame in an unspilled state.
3552 frame_->SpillAll();
3553
Steve Blocka7e24c12009-10-30 11:49:00 +00003554 // Body.
3555 CheckStack(); // TODO(1222600): ignore if body contains calls.
3556 VisitAndSpill(node->body());
3557
3558 // Next. Reestablish a spilled frame in case we are coming here via
3559 // a continue in the body.
3560 node->continue_target()->Bind();
3561 frame_->SpillAll();
3562 frame_->EmitPop(eax);
3563 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3564 frame_->EmitPush(eax);
3565 entry.Jump();
3566
3567 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
3568 // any frame.
3569 node->break_target()->Bind();
3570 frame_->Drop(5);
3571
3572 // Exit.
3573 exit.Bind();
3574
3575 node->continue_target()->Unuse();
3576 node->break_target()->Unuse();
3577}
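// Sketch of what the for-in code above does for
//
//   for (var key in obj) { body; }
//
// The fast path walks obj's prototype chain checking that every object
// has empty elements and a usable enum cache in its instance
// descriptors; in that case the property names come straight from the
// map's enum cache. Otherwise Runtime::kGetPropertyNamesFast builds a
// FixedArray of names. Either way the loop runs over the five stack
// slots described above (counter, length, array, map-or-0, object), and
// each name is re-validated against the expected map or filtered via
// Builtins::FILTER_KEY before being stored into 'key'.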
3578
3579
Steve Block3ce2e202009-11-05 08:53:23 +00003580void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003581 ASSERT(!in_spilled_code());
3582 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00003583 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00003584 CodeForStatementPosition(node);
3585
3586 JumpTarget try_block;
3587 JumpTarget exit;
3588
3589 try_block.Call();
3590 // --- Catch block ---
3591 frame_->EmitPush(eax);
3592
3593 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00003594 Variable* catch_var = node->catch_var()->var();
3595 ASSERT(catch_var != NULL && catch_var->slot() != NULL);
3596 StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00003597
3598 // Remove the exception from the stack.
3599 frame_->Drop();
3600
3601 VisitStatementsAndSpill(node->catch_block()->statements());
3602 if (has_valid_frame()) {
3603 exit.Jump();
3604 }
3605
3606
3607 // --- Try block ---
3608 try_block.Bind();
3609
3610 frame_->PushTryHandler(TRY_CATCH_HANDLER);
3611 int handler_height = frame_->height();
3612
3613 // Shadow the jump targets for all escapes from the try block, including
3614 // returns. During shadowing, the original target is hidden as the
3615 // ShadowTarget and operations on the original actually affect the
3616 // shadowing target.
3617 //
3618 // We should probably try to unify the escaping targets and the return
3619 // target.
3620 int nof_escapes = node->escaping_targets()->length();
3621 List<ShadowTarget*> shadows(1 + nof_escapes);
3622
3623 // Add the shadow target for the function return.
3624 static const int kReturnShadowIndex = 0;
3625 shadows.Add(new ShadowTarget(&function_return_));
3626 bool function_return_was_shadowed = function_return_is_shadowed_;
3627 function_return_is_shadowed_ = true;
3628 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
3629
3630 // Add the remaining shadow targets.
3631 for (int i = 0; i < nof_escapes; i++) {
3632 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
3633 }
3634
3635 // Generate code for the statements in the try block.
3636 VisitStatementsAndSpill(node->try_block()->statements());
3637
3638 // Stop the introduced shadowing and count the number of required unlinks.
3639 // After shadowing stops, the original targets are unshadowed and the
3640 // ShadowTargets represent the formerly shadowing targets.
3641 bool has_unlinks = false;
3642 for (int i = 0; i < shadows.length(); i++) {
3643 shadows[i]->StopShadowing();
3644 has_unlinks = has_unlinks || shadows[i]->is_linked();
3645 }
3646 function_return_is_shadowed_ = function_return_was_shadowed;
3647
3648 // Get an external reference to the handler address.
3649 ExternalReference handler_address(Top::k_handler_address);
3650
3651 // Make sure that there's nothing left on the stack above the
3652 // handler structure.
3653 if (FLAG_debug_code) {
3654 __ mov(eax, Operand::StaticVariable(handler_address));
3655 __ cmp(esp, Operand(eax));
3656 __ Assert(equal, "stack pointer should point to top handler");
3657 }
3658
3659 // If we can fall off the end of the try block, unlink from try chain.
3660 if (has_valid_frame()) {
3661 // The next handler address is on top of the frame. Unlink from
3662 // the handler list and drop the rest of this handler from the
3663 // frame.
3664 ASSERT(StackHandlerConstants::kNextOffset == 0);
3665 frame_->EmitPop(Operand::StaticVariable(handler_address));
3666 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
3667 if (has_unlinks) {
3668 exit.Jump();
3669 }
3670 }
3671
3672 // Generate unlink code for the (formerly) shadowing targets that
3673 // have been jumped to. Deallocate each shadow target.
3674 Result return_value;
3675 for (int i = 0; i < shadows.length(); i++) {
3676 if (shadows[i]->is_linked()) {
3677 // Unlink from try chain; be careful not to destroy the TOS if
3678 // there is one.
3679 if (i == kReturnShadowIndex) {
3680 shadows[i]->Bind(&return_value);
3681 return_value.ToRegister(eax);
3682 } else {
3683 shadows[i]->Bind();
3684 }
3685 // Because we can be jumping here (to spilled code) from
3686 // unspilled code, we need to reestablish a spilled frame at
3687 // this block.
3688 frame_->SpillAll();
3689
3690 // Reload sp from the top handler, because some statements that we
3691 // break from (eg, for...in) may have left stuff on the stack.
3692 __ mov(esp, Operand::StaticVariable(handler_address));
3693 frame_->Forget(frame_->height() - handler_height);
3694
3695 ASSERT(StackHandlerConstants::kNextOffset == 0);
3696 frame_->EmitPop(Operand::StaticVariable(handler_address));
3697 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
3698
3699 if (i == kReturnShadowIndex) {
3700 if (!function_return_is_shadowed_) frame_->PrepareForReturn();
3701 shadows[i]->other_target()->Jump(&return_value);
3702 } else {
3703 shadows[i]->other_target()->Jump();
3704 }
3705 }
3706 }
3707
3708 exit.Bind();
3709}
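// The shadow targets above are what make, e.g.,
//
//   function f() { try { return g(); } catch (e) { return 0; } }
//
// work: the 'return' inside the try block is redirected to a
// ShadowTarget, so the generated code first unlinks the TRY_CATCH
// handler from the chain at Top::k_handler_address and only then
// performs the actual return, keeping the value live in eax.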
3710
3711
Steve Block3ce2e202009-11-05 08:53:23 +00003712void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003713 ASSERT(!in_spilled_code());
3714 VirtualFrame::SpilledScope spilled_scope;
Steve Block3ce2e202009-11-05 08:53:23 +00003715 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00003716 CodeForStatementPosition(node);
3717
3718 // State: Used to keep track of reason for entering the finally
3719 // block. Should probably be extended to hold information for
3720 // break/continue from within the try block.
3721 enum { FALLING, THROWING, JUMPING };
3722
3723 JumpTarget try_block;
3724 JumpTarget finally_block;
3725
3726 try_block.Call();
3727
3728 frame_->EmitPush(eax);
3729 // In case of thrown exceptions, this is where we continue.
3730 __ Set(ecx, Immediate(Smi::FromInt(THROWING)));
3731 finally_block.Jump();
3732
3733 // --- Try block ---
3734 try_block.Bind();
3735
3736 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
3737 int handler_height = frame_->height();
3738
3739 // Shadow the jump targets for all escapes from the try block, including
3740 // returns. During shadowing, the original target is hidden as the
3741 // ShadowTarget and operations on the original actually affect the
3742 // shadowing target.
3743 //
3744 // We should probably try to unify the escaping targets and the return
3745 // target.
3746 int nof_escapes = node->escaping_targets()->length();
3747 List<ShadowTarget*> shadows(1 + nof_escapes);
3748
3749 // Add the shadow target for the function return.
3750 static const int kReturnShadowIndex = 0;
3751 shadows.Add(new ShadowTarget(&function_return_));
3752 bool function_return_was_shadowed = function_return_is_shadowed_;
3753 function_return_is_shadowed_ = true;
3754 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
3755
3756 // Add the remaining shadow targets.
3757 for (int i = 0; i < nof_escapes; i++) {
3758 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
3759 }
3760
3761 // Generate code for the statements in the try block.
3762 VisitStatementsAndSpill(node->try_block()->statements());
3763
3764 // Stop the introduced shadowing and count the number of required unlinks.
3765 // After shadowing stops, the original targets are unshadowed and the
3766 // ShadowTargets represent the formerly shadowing targets.
3767 int nof_unlinks = 0;
3768 for (int i = 0; i < shadows.length(); i++) {
3769 shadows[i]->StopShadowing();
3770 if (shadows[i]->is_linked()) nof_unlinks++;
3771 }
3772 function_return_is_shadowed_ = function_return_was_shadowed;
3773
3774 // Get an external reference to the handler address.
3775 ExternalReference handler_address(Top::k_handler_address);
3776
3777 // If we can fall off the end of the try block, unlink from the try
3778 // chain and set the state on the frame to FALLING.
3779 if (has_valid_frame()) {
3780 // The next handler address is on top of the frame.
3781 ASSERT(StackHandlerConstants::kNextOffset == 0);
3782 frame_->EmitPop(Operand::StaticVariable(handler_address));
3783 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
3784
3785 // Fake a top of stack value (unneeded when FALLING) and set the
3786 // state in ecx, then jump around the unlink blocks if any.
3787 frame_->EmitPush(Immediate(Factory::undefined_value()));
3788 __ Set(ecx, Immediate(Smi::FromInt(FALLING)));
3789 if (nof_unlinks > 0) {
3790 finally_block.Jump();
3791 }
3792 }
3793
3794 // Generate code to unlink and set the state for the (formerly)
3795 // shadowing targets that have been jumped to.
3796 for (int i = 0; i < shadows.length(); i++) {
3797 if (shadows[i]->is_linked()) {
3798 // If we have come from the shadowed return, the return value is
3799 // on the virtual frame. We must preserve it until it is
3800 // pushed.
3801 if (i == kReturnShadowIndex) {
3802 Result return_value;
3803 shadows[i]->Bind(&return_value);
3804 return_value.ToRegister(eax);
3805 } else {
3806 shadows[i]->Bind();
3807 }
3808 // Because we can be jumping here (to spilled code) from
3809 // unspilled code, we need to reestablish a spilled frame at
3810 // this block.
3811 frame_->SpillAll();
3812
3813 // Reload sp from the top handler, because some statements that
3814 // we break from (eg, for...in) may have left stuff on the
3815 // stack.
3816 __ mov(esp, Operand::StaticVariable(handler_address));
3817 frame_->Forget(frame_->height() - handler_height);
3818
3819 // Unlink this handler and drop it from the frame.
3820 ASSERT(StackHandlerConstants::kNextOffset == 0);
3821 frame_->EmitPop(Operand::StaticVariable(handler_address));
3822 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
3823
3824 if (i == kReturnShadowIndex) {
3825 // If this target shadowed the function return, materialize
3826 // the return value on the stack.
3827 frame_->EmitPush(eax);
3828 } else {
3829 // Fake TOS for targets that shadowed breaks and continues.
3830 frame_->EmitPush(Immediate(Factory::undefined_value()));
3831 }
3832 __ Set(ecx, Immediate(Smi::FromInt(JUMPING + i)));
3833 if (--nof_unlinks > 0) {
3834 // If this is not the last unlink block, jump around the next.
3835 finally_block.Jump();
3836 }
3837 }
3838 }
3839
3840 // --- Finally block ---
3841 finally_block.Bind();
3842
3843 // Push the state on the stack.
3844 frame_->EmitPush(ecx);
3845
3846 // We keep two elements on the stack - the (possibly faked) result
3847 // and the state - while evaluating the finally block.
3848 //
3849 // Generate code for the statements in the finally block.
3850 VisitStatementsAndSpill(node->finally_block()->statements());
3851
3852 if (has_valid_frame()) {
3853 // Restore state and return value or faked TOS.
3854 frame_->EmitPop(ecx);
3855 frame_->EmitPop(eax);
3856 }
3857
3858 // Generate code to jump to the right destination for all used
3859 // formerly shadowing targets. Deallocate each shadow target.
3860 for (int i = 0; i < shadows.length(); i++) {
3861 if (has_valid_frame() && shadows[i]->is_bound()) {
3862 BreakTarget* original = shadows[i]->other_target();
3863 __ cmp(Operand(ecx), Immediate(Smi::FromInt(JUMPING + i)));
3864 if (i == kReturnShadowIndex) {
3865 // The return value is (already) in eax.
3866 Result return_value = allocator_->Allocate(eax);
3867 ASSERT(return_value.is_valid());
3868 if (function_return_is_shadowed_) {
3869 original->Branch(equal, &return_value);
3870 } else {
3871 // Branch around the preparation for return which may emit
3872 // code.
3873 JumpTarget skip;
3874 skip.Branch(not_equal);
3875 frame_->PrepareForReturn();
3876 original->Jump(&return_value);
3877 skip.Bind();
3878 }
3879 } else {
3880 original->Branch(equal);
3881 }
3882 }
3883 }
3884
3885 if (has_valid_frame()) {
3886 // Check if we need to rethrow the exception.
3887 JumpTarget exit;
3888 __ cmp(Operand(ecx), Immediate(Smi::FromInt(THROWING)));
3889 exit.Branch(not_equal);
3890
3891 // Rethrow exception.
3892 frame_->EmitPush(eax); // undo pop from above
3893 frame_->CallRuntime(Runtime::kReThrow, 1);
3894
3895 // Done.
3896 exit.Bind();
3897 }
3898}
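// The FALLING/THROWING/JUMPING states drive the dispatch after the
// finally block. For example, in
//
//   try { return compute(); } finally { cleanup(); }
//
// the shadowed return leaves the result in eax, sets ecx to
// JUMPING + kReturnShadowIndex, runs the finally block with the
// (result, state) pair on the stack, and the comparison chain at the
// end then re-dispatches to the real return. A THROWING state instead
// re-throws via Runtime::kReThrow, and FALLING simply falls off the end
// of the statement.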
3899
3900
3901void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
3902 ASSERT(!in_spilled_code());
3903 Comment cmnt(masm_, "[ DebuggerStatement");
3904 CodeForStatementPosition(node);
3905#ifdef ENABLE_DEBUGGER_SUPPORT
3906 // Spill everything, even constants, to the frame.
3907 frame_->SpillAll();
Leon Clarke4515c472010-02-03 11:58:03 +00003908
3909 DebuggerStatementStub ces;
3910 frame_->CallStub(&ces, 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003911 // Ignore the return value.
3912#endif
3913}
3914
3915
3916void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
Leon Clarkee46be812010-01-19 14:06:41 +00003917 ASSERT(boilerplate->IsBoilerplate());
3918
Steve Blocka7e24c12009-10-30 11:49:00 +00003919 // The inevitable call will sync frame elements to memory anyway, so
3920 // we do it eagerly to allow us to push the arguments directly into
3921 // place.
Steve Blocka7e24c12009-10-30 11:49:00 +00003922 frame_->SyncRange(0, frame_->element_count() - 1);
3923
Leon Clarkee46be812010-01-19 14:06:41 +00003924 // Use the fast case closure allocation code that allocates in new
3925 // space for nested functions that don't need literals cloning.
3926 if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) {
3927 FastNewClosureStub stub;
3928 frame_->EmitPush(Immediate(boilerplate));
3929 Result answer = frame_->CallStub(&stub, 1);
3930 frame_->Push(&answer);
3931 } else {
3932 // Call the runtime to instantiate the function boilerplate
3933 // object.
3934 frame_->EmitPush(esi);
3935 frame_->EmitPush(Immediate(boilerplate));
3936 Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
3937 frame_->Push(&result);
3938 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003939}
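// The fast path above covers the common case of a nested function with
// no literals of its own, e.g. the inner closure in
//
//   function outer() { return function inner(a) { return a + 1; }; }
//
// (assuming it really has zero literals), which FastNewClosureStub can
// allocate directly in new space; anything else goes through
// Runtime::kNewClosure.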
3940
3941
3942void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
3943 Comment cmnt(masm_, "[ FunctionLiteral");
3944
3945 // Build the function boilerplate and instantiate it.
Steve Blockd0582a62009-12-15 09:54:21 +00003946 Handle<JSFunction> boilerplate =
Andrei Popescu31002712010-02-23 13:46:05 +00003947 Compiler::BuildBoilerplate(node, script(), this);
Steve Blocka7e24c12009-10-30 11:49:00 +00003948 // Check for stack-overflow exception.
3949 if (HasStackOverflow()) return;
3950 InstantiateBoilerplate(boilerplate);
3951}
3952
3953
3954void CodeGenerator::VisitFunctionBoilerplateLiteral(
3955 FunctionBoilerplateLiteral* node) {
3956 Comment cmnt(masm_, "[ FunctionBoilerplateLiteral");
3957 InstantiateBoilerplate(node->boilerplate());
3958}
3959
3960
3961void CodeGenerator::VisitConditional(Conditional* node) {
3962 Comment cmnt(masm_, "[ Conditional");
3963 JumpTarget then;
3964 JumpTarget else_;
3965 JumpTarget exit;
3966 ControlDestination dest(&then, &else_, true);
Steve Blockd0582a62009-12-15 09:54:21 +00003967 LoadCondition(node->condition(), &dest, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003968
3969 if (dest.false_was_fall_through()) {
3970 // The else target was bound, so we compile the else part first.
Steve Blockd0582a62009-12-15 09:54:21 +00003971 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003972
3973 if (then.is_linked()) {
3974 exit.Jump();
3975 then.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00003976 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003977 }
3978 } else {
3979 // The then target was bound, so we compile the then part first.
Steve Blockd0582a62009-12-15 09:54:21 +00003980 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003981
3982 if (else_.is_linked()) {
3983 exit.Jump();
3984 else_.Bind();
Steve Blockd0582a62009-12-15 09:54:21 +00003985 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003986 }
3987 }
3988
3989 exit.Bind();
3990}
3991
3992
3993void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
3994 if (slot->type() == Slot::LOOKUP) {
3995 ASSERT(slot->var()->is_dynamic());
3996
3997 JumpTarget slow;
3998 JumpTarget done;
3999 Result value;
4000
4001 // Generate fast-case code for variables that might be shadowed by
4002 // eval-introduced variables. Eval is used a lot without
4003 // introducing variables. In those cases, we do not want to
4004 // perform a runtime call for all variables in the scope
4005 // containing the eval.
4006 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
4007 value = LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow);
4008 // If there was no control flow to slow, we can exit early.
4009 if (!slow.is_linked()) {
4010 frame_->Push(&value);
4011 return;
4012 }
4013
4014 done.Jump(&value);
4015
4016 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
4017 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
4018 // Only generate the fast case for locals that rewrite to slots.
4019 // This rules out argument loads.
4020 if (potential_slot != NULL) {
4021 // Allocate a fresh register to use as a temp in
4022 // ContextSlotOperandCheckExtensions and to hold the result
4023 // value.
4024 value = allocator_->Allocate();
4025 ASSERT(value.is_valid());
4026 __ mov(value.reg(),
4027 ContextSlotOperandCheckExtensions(potential_slot,
4028 value,
4029 &slow));
4030 if (potential_slot->var()->mode() == Variable::CONST) {
4031 __ cmp(value.reg(), Factory::the_hole_value());
4032 done.Branch(not_equal, &value);
4033 __ mov(value.reg(), Factory::undefined_value());
4034 }
4035 // There is always control flow to slow from
4036 // ContextSlotOperandCheckExtensions so we have to jump around
4037 // it.
4038 done.Jump(&value);
4039 }
4040 }
4041
4042 slow.Bind();
4043 // A runtime call is inevitable. We eagerly sync frame elements
4044 // to memory so that we can push the arguments directly into place
4045 // on top of the frame.
4046 frame_->SyncRange(0, frame_->element_count() - 1);
4047 frame_->EmitPush(esi);
4048 frame_->EmitPush(Immediate(slot->var()->name()));
4049 if (typeof_state == INSIDE_TYPEOF) {
4050 value =
4051 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4052 } else {
4053 value = frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
4054 }
4055
4056 done.Bind(&value);
4057 frame_->Push(&value);
4058
4059 } else if (slot->var()->mode() == Variable::CONST) {
4060 // Const slots may contain 'the hole' value (the constant hasn't been
4061 // initialized yet) which needs to be converted into the 'undefined'
4062 // value.
4063 //
4064 // We currently spill the virtual frame because constants use the
4065 // potentially unsafe direct-frame access of SlotOperand.
4066 VirtualFrame::SpilledScope spilled_scope;
4067 Comment cmnt(masm_, "[ Load const");
4068 JumpTarget exit;
4069 __ mov(ecx, SlotOperand(slot, ecx));
4070 __ cmp(ecx, Factory::the_hole_value());
4071 exit.Branch(not_equal);
4072 __ mov(ecx, Factory::undefined_value());
4073 exit.Bind();
4074 frame_->EmitPush(ecx);
4075
4076 } else if (slot->type() == Slot::PARAMETER) {
4077 frame_->PushParameterAt(slot->index());
4078
4079 } else if (slot->type() == Slot::LOCAL) {
4080 frame_->PushLocalAt(slot->index());
4081
4082 } else {
4083 // The other remaining slot types (LOOKUP and GLOBAL) cannot reach
4084 // here.
4085 //
4086 // The use of SlotOperand below is safe for an unspilled frame
4087 // because it will always be a context slot.
4088 ASSERT(slot->type() == Slot::CONTEXT);
4089 Result temp = allocator_->Allocate();
4090 ASSERT(temp.is_valid());
4091 __ mov(temp.reg(), SlotOperand(slot, temp.reg()));
4092 frame_->Push(&temp);
4093 }
4094}
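// The CONST handling above is what makes a constant read as 'undefined'
// before its initializer has run, e.g.:
//
//   use(c);        // loads the hole, converted to undefined here
//   const c = 1;   // initializer executes later
//
// (use() is just an illustrative consumer). The other slot types load
// the value directly from the parameter, local, or context slot.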
4095
4096
4097void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
4098 TypeofState state) {
4099 LoadFromSlot(slot, state);
4100
4101 // Bail out quickly if we're not using lazy arguments allocation.
4102 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
4103
4104 // ... or if the slot isn't a non-parameter arguments slot.
4105 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
4106
4107 // Pop the loaded value from the stack.
4108 Result value = frame_->Pop();
4109
4110 // If the loaded value is a constant, we know if the arguments
4111 // object has been lazily loaded yet.
4112 if (value.is_constant()) {
4113 if (value.handle()->IsTheHole()) {
4114 Result arguments = StoreArgumentsObject(false);
4115 frame_->Push(&arguments);
4116 } else {
4117 frame_->Push(&value);
4118 }
4119 return;
4120 }
4121
4122 // The loaded value is in a register. If it is the sentinel that
4123 // indicates that we haven't loaded the arguments object yet, we
4124 // need to do it now.
4125 JumpTarget exit;
4126 __ cmp(Operand(value.reg()), Immediate(Factory::the_hole_value()));
4127 frame_->Push(&value);
4128 exit.Branch(not_equal);
4129 Result arguments = StoreArgumentsObject(false);
4130 frame_->SetElementAt(0, &arguments);
4131 exit.Bind();
4132}
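// The hole check above implements lazy allocation of the arguments
// object (under LAZY_ARGUMENTS_ALLOCATION): a function that touches
// 'arguments' only on some paths keeps the sentinel hole in the
// arguments slot until a load like this one actually happens, at which
// point StoreArgumentsObject materializes the object.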
4133
4134
4135Result CodeGenerator::LoadFromGlobalSlotCheckExtensions(
4136 Slot* slot,
4137 TypeofState typeof_state,
4138 JumpTarget* slow) {
4139 // Check that no extension objects have been created by calls to
4140 // eval from the current scope to the global scope.
4141 Register context = esi;
4142 Result tmp = allocator_->Allocate();
4143 ASSERT(tmp.is_valid()); // All non-reserved registers were available.
4144
4145 Scope* s = scope();
4146 while (s != NULL) {
4147 if (s->num_heap_slots() > 0) {
4148 if (s->calls_eval()) {
4149 // Check that extension is NULL.
4150 __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
4151 Immediate(0));
4152 slow->Branch(not_equal, not_taken);
4153 }
4154 // Load next context in chain.
4155 __ mov(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX));
4156 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
4157 context = tmp.reg();
4158 }
4159 // If no outer scope calls eval, we do not need to check more
4160 // context extensions. If we have reached an eval scope, we check
4161 // all extensions from this point.
4162 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
4163 s = s->outer_scope();
4164 }
4165
4166 if (s != NULL && s->is_eval_scope()) {
4167 // Loop up the context chain. There is no frame effect so it is
4168 // safe to use raw labels here.
4169 Label next, fast;
4170 if (!context.is(tmp.reg())) {
4171 __ mov(tmp.reg(), context);
4172 }
4173 __ bind(&next);
4174 // Terminate at global context.
4175 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
4176 Immediate(Factory::global_context_map()));
4177 __ j(equal, &fast);
4178 // Check that extension is NULL.
4179 __ cmp(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
4180 slow->Branch(not_equal, not_taken);
4181 // Load next context in chain.
4182 __ mov(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
4183 __ mov(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
4184 __ jmp(&next);
4185 __ bind(&fast);
4186 }
4187 tmp.Unuse();
4188
4189 // All extension objects were empty and it is safe to use a global
4190 // load IC call.
4191 LoadGlobal();
4192 frame_->Push(slot->var()->name());
4193 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
4194 ? RelocInfo::CODE_TARGET
4195 : RelocInfo::CODE_TARGET_CONTEXT;
4196 Result answer = frame_->CallLoadIC(mode);
4197 // A test eax instruction following the call signals that the inobject
4198 // property case was inlined. Ensure that there is not a test eax
4199 // instruction here.
4200 __ nop();
4201 // Discard the global object. The result is in answer.
4202 frame_->Drop();
4203 return answer;
4204}
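// The extension checks above guard against eval introducing a binding
// between this use and the global object, e.g.:
//
//   function f(src) { eval(src); return x; }   // src might be "var x = 1"
//
// Every context between here and the global context that belongs to a
// scope calling eval must have a NULL extension object; otherwise the
// load takes the slow path (a full runtime context lookup) instead of
// the global load IC.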
4205
4206
4207void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
4208 if (slot->type() == Slot::LOOKUP) {
4209 ASSERT(slot->var()->is_dynamic());
4210
4211 // For now, just do a runtime call. Since the call is inevitable,
4212 // we eagerly sync the virtual frame so we can directly push the
4213 // arguments into place.
4214 frame_->SyncRange(0, frame_->element_count() - 1);
4215
4216 frame_->EmitPush(esi);
4217 frame_->EmitPush(Immediate(slot->var()->name()));
4218
4219 Result value;
4220 if (init_state == CONST_INIT) {
4221 // Same as the case for a normal store, but ignores attribute
4222 // (e.g. READ_ONLY) of context slot so that we can initialize const
4223 // properties (introduced via eval("const foo = (some expr);")). Also,
4224 // uses the current function context instead of the top context.
4225 //
4226 // Note that we must declare the foo upon entry of eval(), via a
4227 // context slot declaration, but we cannot initialize it at the same
4228 // time, because the const declaration may be at the end of the eval
4229 // code (sigh...) and the const variable may have been used before
4230 // (where its value is 'undefined'). Thus, we can only do the
4231 // initialization when we actually encounter the expression and when
4232 // the expression operands are defined and valid, and thus we need the
4233 // split into 2 operations: declaration of the context slot followed
4234 // by initialization.
4235 value = frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
4236 } else {
4237 value = frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
4238 }
4239 // Storing a variable must keep the (new) value on the expression
4240 // stack. This is necessary for compiling chained assignment
4241 // expressions.
4242 frame_->Push(&value);
4243
4244 } else {
4245 ASSERT(!slot->var()->is_dynamic());
4246
4247 JumpTarget exit;
4248 if (init_state == CONST_INIT) {
4249 ASSERT(slot->var()->mode() == Variable::CONST);
4250 // Only the first const initialization must be executed (the slot
4251 // still contains 'the hole' value). When the assignment is executed,
4252 // the code is identical to a normal store (see below).
4253 //
4254 // We spill the frame in the code below because the direct-frame
4255 // access of SlotOperand is potentially unsafe with an unspilled
4256 // frame.
4257 VirtualFrame::SpilledScope spilled_scope;
4258 Comment cmnt(masm_, "[ Init const");
4259 __ mov(ecx, SlotOperand(slot, ecx));
4260 __ cmp(ecx, Factory::the_hole_value());
4261 exit.Branch(not_equal);
4262 }
4263
4264 // We must execute the store. Storing a variable must keep the (new)
4265 // value on the stack. This is necessary for compiling assignment
4266 // expressions.
4267 //
4268 // Note: We will reach here even with slot->var()->mode() ==
4269 // Variable::CONST because of const declarations which will initialize
4270 // consts to 'the hole' value and by doing so, end up calling this code.
4271 if (slot->type() == Slot::PARAMETER) {
4272 frame_->StoreToParameterAt(slot->index());
4273 } else if (slot->type() == Slot::LOCAL) {
4274 frame_->StoreToLocalAt(slot->index());
4275 } else {
4276 // The other slot types (LOOKUP and GLOBAL) cannot reach here.
4277 //
4278 // The use of SlotOperand below is safe for an unspilled frame
4279 // because the slot is a context slot.
4280 ASSERT(slot->type() == Slot::CONTEXT);
4281 frame_->Dup();
4282 Result value = frame_->Pop();
4283 value.ToRegister();
4284 Result start = allocator_->Allocate();
4285 ASSERT(start.is_valid());
4286 __ mov(SlotOperand(slot, start.reg()), value.reg());
4287 // RecordWrite may destroy the value registers.
4288 //
4289 // TODO(204): Avoid actually spilling when the value is not
4290 // needed (probably the common case).
4291 frame_->Spill(value.reg());
4292 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
4293 Result temp = allocator_->Allocate();
4294 ASSERT(temp.is_valid());
4295 __ RecordWrite(start.reg(), offset, value.reg(), temp.reg());
4296 // The results start, value, and temp are unused by going out of
4297 // scope.
4298 }
4299
4300 exit.Bind();
4301 }
4302}
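// CONST_INIT above makes constant initialization idempotent: the store
// only happens while the slot still holds the hole, so executing the
// initializer of
//
//   const k = init();
//
// a second time leaves the first value in place. Context-slot stores
// additionally go through RecordWrite so the write barrier sees the new
// value.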
4303
4304
4305void CodeGenerator::VisitSlot(Slot* node) {
4306 Comment cmnt(masm_, "[ Slot");
Steve Blockd0582a62009-12-15 09:54:21 +00004307 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00004308}
4309
4310
4311void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
4312 Comment cmnt(masm_, "[ VariableProxy");
4313 Variable* var = node->var();
4314 Expression* expr = var->rewrite();
4315 if (expr != NULL) {
4316 Visit(expr);
4317 } else {
4318 ASSERT(var->is_global());
4319 Reference ref(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00004320 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00004321 }
4322}
4323
4324
4325void CodeGenerator::VisitLiteral(Literal* node) {
4326 Comment cmnt(masm_, "[ Literal");
4327 frame_->Push(node->handle());
4328}
4329
4330
Steve Blockd0582a62009-12-15 09:54:21 +00004331void CodeGenerator::PushUnsafeSmi(Handle<Object> value) {
4332 ASSERT(value->IsSmi());
4333 int bits = reinterpret_cast<int>(*value);
4334 __ push(Immediate(bits & 0x0000FFFF));
4335 __ or_(Operand(esp, 0), Immediate(bits & 0xFFFF0000));
4336}
4337
4338
4339void CodeGenerator::StoreUnsafeSmiToLocal(int offset, Handle<Object> value) {
4340 ASSERT(value->IsSmi());
4341 int bits = reinterpret_cast<int>(*value);
4342 __ mov(Operand(ebp, offset), Immediate(bits & 0x0000FFFF));
4343 __ or_(Operand(ebp, offset), Immediate(bits & 0xFFFF0000));
4344}
4345
4346
4347void CodeGenerator::MoveUnsafeSmi(Register target, Handle<Object> value) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004348 ASSERT(target.is_valid());
4349 ASSERT(value->IsSmi());
4350 int bits = reinterpret_cast<int>(*value);
4351 __ Set(target, Immediate(bits & 0x0000FFFF));
Steve Blockd0582a62009-12-15 09:54:21 +00004352 __ or_(target, bits & 0xFFFF0000);
Steve Blocka7e24c12009-10-30 11:49:00 +00004353}
4354
4355
4356bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
4357 if (!value->IsSmi()) return false;
4358 int int_value = Smi::cast(*value)->value();
4359 return !is_intn(int_value, kMaxSmiInlinedBits);
4360}
4361
4362
4363// Materialize the regexp literal 'node' in the literals array
4364// 'literals' of the function. Leave the regexp boilerplate in
4365// 'boilerplate'.
4366class DeferredRegExpLiteral: public DeferredCode {
4367 public:
4368 DeferredRegExpLiteral(Register boilerplate,
4369 Register literals,
4370 RegExpLiteral* node)
4371 : boilerplate_(boilerplate), literals_(literals), node_(node) {
4372 set_comment("[ DeferredRegExpLiteral");
4373 }
4374
4375 void Generate();
4376
4377 private:
4378 Register boilerplate_;
4379 Register literals_;
4380 RegExpLiteral* node_;
4381};
4382
4383
4384void DeferredRegExpLiteral::Generate() {
4385 // Since the entry is undefined we call the runtime system to
4386 // compute the literal.
4387 // Literal array (0).
4388 __ push(literals_);
4389 // Literal index (1).
4390 __ push(Immediate(Smi::FromInt(node_->literal_index())));
4391 // RegExp pattern (2).
4392 __ push(Immediate(node_->pattern()));
4393 // RegExp flags (3).
4394 __ push(Immediate(node_->flags()));
4395 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
4396 if (!boilerplate_.is(eax)) __ mov(boilerplate_, eax);
4397}
4398
4399
4400void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
4401 Comment cmnt(masm_, "[ RegExp Literal");
4402
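// JavaScript example (hypothetical): in 'function f() { return /ab+c/gi; }'
// the literal /ab+c/gi occupies one slot of f's literals array. The first
// execution finds undefined in that slot and takes the deferred path below;
// later executions can reuse the cached boilerplate.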
4403 // Retrieve the literals array and check the allocated entry. Begin
4404 // with a writable copy of the function of this activation in a
4405 // register.
4406 frame_->PushFunction();
4407 Result literals = frame_->Pop();
4408 literals.ToRegister();
4409 frame_->Spill(literals.reg());
4410
4411 // Load the literals array of the function.
4412 __ mov(literals.reg(),
4413 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
4414
4415 // Load the literal at the ast saved index.
4416 Result boilerplate = allocator_->Allocate();
4417 ASSERT(boilerplate.is_valid());
4418 int literal_offset =
4419 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
4420 __ mov(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
4421
4422 // Check whether we need to materialize the RegExp object. If so,
4423 // jump to the deferred code passing the literals array.
4424 DeferredRegExpLiteral* deferred =
4425 new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
4426 __ cmp(boilerplate.reg(), Factory::undefined_value());
4427 deferred->Branch(equal);
4428 deferred->BindExit();
4429 literals.Unuse();
4430
4431 // Push the boilerplate object.
4432 frame_->Push(&boilerplate);
4433}
4434
4435
Steve Blocka7e24c12009-10-30 11:49:00 +00004436void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
4437 Comment cmnt(masm_, "[ ObjectLiteral");
4438
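// JavaScript example (hypothetical): in the literal
//   { a: 1, b: x, 7: y, get c() { return 1; } }
// the constant property 'a: 1' comes with the boilerplate, 'b: x' is stored
// through the store IC (symbol key), '7: y' goes through Runtime::kSetProperty,
// and the getter is installed with Runtime::kDefineAccessor, matching the
// switch below.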
Leon Clarkee46be812010-01-19 14:06:41 +00004439 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00004440 // register.
4441 frame_->PushFunction();
4442 Result literals = frame_->Pop();
4443 literals.ToRegister();
4444 frame_->Spill(literals.reg());
4445
4446 // Load the literals array of the function.
4447 __ mov(literals.reg(),
4448 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00004449 // Literal array.
4450 frame_->Push(&literals);
4451 // Literal index.
4452 frame_->Push(Smi::FromInt(node->literal_index()));
4453 // Constant properties.
4454 frame_->Push(node->constant_properties());
4455 Result clone;
4456 if (node->depth() > 1) {
4457 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteral, 3);
4458 } else {
4459 clone = frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00004460 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004461 frame_->Push(&clone);
4462
4463 for (int i = 0; i < node->properties()->length(); i++) {
4464 ObjectLiteral::Property* property = node->properties()->at(i);
4465 switch (property->kind()) {
4466 case ObjectLiteral::Property::CONSTANT:
4467 break;
4468 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
4469 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
4470 // else fall through.
4471 case ObjectLiteral::Property::COMPUTED: {
4472 Handle<Object> key(property->key()->handle());
4473 if (key->IsSymbol()) {
4474 // Duplicate the object as the IC receiver.
4475 frame_->Dup();
4476 Load(property->value());
4477 frame_->Push(key);
4478 Result ignored = frame_->CallStoreIC();
Steve Blocka7e24c12009-10-30 11:49:00 +00004479 break;
4480 }
4481 // Fall through
4482 }
4483 case ObjectLiteral::Property::PROTOTYPE: {
4484 // Duplicate the object as an argument to the runtime call.
4485 frame_->Dup();
4486 Load(property->key());
4487 Load(property->value());
4488 Result ignored = frame_->CallRuntime(Runtime::kSetProperty, 3);
4489 // Ignore the result.
4490 break;
4491 }
4492 case ObjectLiteral::Property::SETTER: {
4493 // Duplicate the object as an argument to the runtime call.
4494 frame_->Dup();
4495 Load(property->key());
4496 frame_->Push(Smi::FromInt(1));
4497 Load(property->value());
4498 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
4499 // Ignore the result.
4500 break;
4501 }
4502 case ObjectLiteral::Property::GETTER: {
4503 // Duplicate the object as an argument to the runtime call.
4504 frame_->Dup();
4505 Load(property->key());
4506 frame_->Push(Smi::FromInt(0));
4507 Load(property->value());
4508 Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
4509 // Ignore the result.
4510 break;
4511 }
4512 default: UNREACHABLE();
4513 }
4514 }
4515}
4516
4517
Steve Blocka7e24c12009-10-30 11:49:00 +00004518void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
4519 Comment cmnt(masm_, "[ ArrayLiteral");
4520
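// JavaScript example (hypothetical): in the literal [1, 2, x, [3]] the
// elements 1, 2 and the nested literal [3] are already part of the
// boilerplate; only 'x' is stored by the generated code below, followed by a
// write barrier for the elements array.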
Leon Clarkee46be812010-01-19 14:06:41 +00004521 // Load a writable copy of the function of this activation in a
Steve Blocka7e24c12009-10-30 11:49:00 +00004522 // register.
4523 frame_->PushFunction();
4524 Result literals = frame_->Pop();
4525 literals.ToRegister();
4526 frame_->Spill(literals.reg());
4527
4528 // Load the literals array of the function.
4529 __ mov(literals.reg(),
4530 FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
4531
Leon Clarkee46be812010-01-19 14:06:41 +00004532 frame_->Push(&literals);
4533 frame_->Push(Smi::FromInt(node->literal_index()));
4534 frame_->Push(node->constant_elements());
4535 int length = node->values()->length();
4536 Result clone;
4537 if (node->depth() > 1) {
4538 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
4539 } else if (length > FastCloneShallowArrayStub::kMaximumLength) {
4540 clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
4541 } else {
4542 FastCloneShallowArrayStub stub(length);
4543 clone = frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00004544 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004545 frame_->Push(&clone);
4546
4547 // Generate code to set the elements in the array that are not
4548 // literals.
Leon Clarkee46be812010-01-19 14:06:41 +00004549 for (int i = 0; i < length; i++) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004550 Expression* value = node->values()->at(i);
4551
4552 // If value is a literal the property value is already set in the
4553 // boilerplate object.
4554 if (value->AsLiteral() != NULL) continue;
4555 // If value is a materialized literal the property value is already set
4556 // in the boilerplate object if it is simple.
4557 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
4558
4559 // The property must be set by generated code.
4560 Load(value);
4561
4562 // Get the property value off the stack.
4563 Result prop_value = frame_->Pop();
4564 prop_value.ToRegister();
4565
4566 // Fetch the array literal while leaving a copy on the stack and
4567 // use it to get the elements array.
4568 frame_->Dup();
4569 Result elements = frame_->Pop();
4570 elements.ToRegister();
4571 frame_->Spill(elements.reg());
4572 // Get the elements array.
4573 __ mov(elements.reg(),
4574 FieldOperand(elements.reg(), JSObject::kElementsOffset));
4575
4576 // Write to the indexed properties array.
4577 int offset = i * kPointerSize + FixedArray::kHeaderSize;
4578 __ mov(FieldOperand(elements.reg(), offset), prop_value.reg());
4579
4580 // Update the write barrier for the array address.
4581 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier.
4582 Result scratch = allocator_->Allocate();
4583 ASSERT(scratch.is_valid());
4584 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
4585 }
4586}
4587
4588
4589void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
4590 ASSERT(!in_spilled_code());
4591 // Call runtime routine to allocate the catch extension object and
4592 // assign the exception value to the catch variable.
4593 Comment cmnt(masm_, "[ CatchExtensionObject");
4594 Load(node->key());
4595 Load(node->value());
4596 Result result =
4597 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
4598 frame_->Push(&result);
4599}
4600
4601
4602void CodeGenerator::VisitAssignment(Assignment* node) {
Leon Clarked91b9f72010-01-27 17:25:45 +00004603#ifdef DEBUG
4604 int original_height = frame_->height();
4605#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00004606 Comment cmnt(masm_, "[ Assignment");
4607
Leon Clarked91b9f72010-01-27 17:25:45 +00004608 { Reference target(this, node->target(), node->is_compound());
Steve Blocka7e24c12009-10-30 11:49:00 +00004609 if (target.is_illegal()) {
4610 // Fool the virtual frame into thinking that we left the assignment's
4611 // value on the frame.
4612 frame_->Push(Smi::FromInt(0));
4613 return;
4614 }
4615 Variable* var = node->target()->AsVariableProxy()->AsVariable();
4616
4617 if (node->starts_initialization_block()) {
4618 ASSERT(target.type() == Reference::NAMED ||
4619 target.type() == Reference::KEYED);
4620 // Change to slow case in the beginning of an initialization
4621 // block to avoid the quadratic behavior of repeatedly adding
4622 // fast properties.
4623
4624 // The receiver is the argument to the runtime call. It is the
4625 // first value pushed when the reference was loaded to the
4626 // frame.
4627 frame_->PushElementAt(target.size() - 1);
4628 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
4629 }
Leon Clarked91b9f72010-01-27 17:25:45 +00004630 if (node->ends_initialization_block()) {
4631 // Add an extra copy of the receiver to the frame, so that it can be
4632 // converted back to fast case after the assignment.
4633 ASSERT(target.type() == Reference::NAMED ||
4634 target.type() == Reference::KEYED);
4635 if (target.type() == Reference::NAMED) {
4636 frame_->Dup();
4637 // Dup target receiver on stack.
4638 } else {
4639 ASSERT(target.type() == Reference::KEYED);
4640 Result temp = frame_->Pop();
4641 frame_->Dup();
4642 frame_->Push(&temp);
4643 }
4644 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004645 if (node->op() == Token::ASSIGN ||
4646 node->op() == Token::INIT_VAR ||
4647 node->op() == Token::INIT_CONST) {
4648 Load(node->value());
4649
Leon Clarked91b9f72010-01-27 17:25:45 +00004650 } else { // Assignment is a compound assignment.
Steve Blocka7e24c12009-10-30 11:49:00 +00004651 Literal* literal = node->value()->AsLiteral();
4652 bool overwrite_value =
4653 (node->value()->AsBinaryOperation() != NULL &&
4654 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
4655 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
4656 // There are two cases where the target is not read in the right hand
4657 // side that are easy to test for: the right hand side is a literal,
4658 // or the right hand side is a different variable. TakeValue invalidates
4659 // the target, with an implicit promise that it will be written to again
4660 // before it is read.
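// For example (hypothetical code): in 'x += 1' the right hand side is a
// literal and in 'x *= y' it is a different variable, so the target can be
// taken; in 'x += x' or 'x += f()' the target must be read with GetValue.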
4661 if (literal != NULL || (right_var != NULL && right_var != var)) {
Steve Blockd0582a62009-12-15 09:54:21 +00004662 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00004663 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00004664 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00004665 }
4666 Load(node->value());
4667 GenericBinaryOperation(node->binary_op(),
4668 node->type(),
4669 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
4670 }
4671
4672 if (var != NULL &&
4673 var->mode() == Variable::CONST &&
4674 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
4675 // Assignment ignored - leave the value on the stack.
Leon Clarked91b9f72010-01-27 17:25:45 +00004676 UnloadReference(&target);
Steve Blocka7e24c12009-10-30 11:49:00 +00004677 } else {
4678 CodeForSourcePosition(node->position());
4679 if (node->op() == Token::INIT_CONST) {
4680 // Dynamic constant initializations must use the function context
4681 // and initialize the actual constant declared. Dynamic variable
4682 // initializations are simply assignments and use SetValue.
4683 target.SetValue(CONST_INIT);
4684 } else {
4685 target.SetValue(NOT_CONST_INIT);
4686 }
4687 if (node->ends_initialization_block()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00004688 ASSERT(target.type() == Reference::UNLOADED);
Steve Blocka7e24c12009-10-30 11:49:00 +00004689 // End of initialization block. Revert to fast case. The
Leon Clarked91b9f72010-01-27 17:25:45 +00004690 // argument to the runtime call is the extra copy of the receiver,
4691 // which is below the value of the assignment.
4692 // Swap the receiver and the value of the assignment expression.
4693 Result lhs = frame_->Pop();
4694 Result receiver = frame_->Pop();
4695 frame_->Push(&lhs);
4696 frame_->Push(&receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00004697 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
4698 }
4699 }
4700 }
Leon Clarked91b9f72010-01-27 17:25:45 +00004701 ASSERT(frame_->height() == original_height + 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00004702}
4703
4704
4705void CodeGenerator::VisitThrow(Throw* node) {
4706 Comment cmnt(masm_, "[ Throw");
4707 Load(node->exception());
4708 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
4709 frame_->Push(&result);
4710}
4711
4712
4713void CodeGenerator::VisitProperty(Property* node) {
4714 Comment cmnt(masm_, "[ Property");
4715 Reference property(this, node);
Steve Blockd0582a62009-12-15 09:54:21 +00004716 property.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00004717}
4718
4719
4720void CodeGenerator::VisitCall(Call* node) {
4721 Comment cmnt(masm_, "[ Call");
4722
4723 Expression* function = node->expression();
4724 ZoneList<Expression*>* args = node->arguments();
4725
4726 // Check if the function is a variable or a property.
4727 Variable* var = function->AsVariableProxy()->AsVariable();
4728 Property* property = function->AsProperty();
4729
4730 // ------------------------------------------------------------------------
4731 // Fast-case: Use inline caching.
4732 // ---
4733 // According to ECMA-262, section 11.2.3, page 44, the function to call
4734 // must be resolved after the arguments have been evaluated. The IC code
4735 // automatically handles this by loading the arguments before the function
4736 // is resolved in cache misses (this also holds for megamorphic calls).
4737 // ------------------------------------------------------------------------
4738
4739 if (var != NULL && var->is_possibly_eval()) {
4740 // ----------------------------------
4741 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
4742 // ----------------------------------
4743
4744 // In a call to eval, we first call %ResolvePossiblyDirectEval to
4745 // resolve the function we need to call and the receiver of the
4746 // call. Then we call the resolved function using the given
4747 // arguments.
4748
4749 // Prepare the stack for the call to the resolved function.
4750 Load(function);
4751
4752 // Allocate a frame slot for the receiver.
4753 frame_->Push(Factory::undefined_value());
4754 int arg_count = args->length();
4755 for (int i = 0; i < arg_count; i++) {
4756 Load(args->at(i));
4757 }
4758
4759 // Prepare the stack for the call to ResolvePossiblyDirectEval.
4760 frame_->PushElementAt(arg_count + 1);
4761 if (arg_count > 0) {
4762 frame_->PushElementAt(arg_count);
4763 } else {
4764 frame_->Push(Factory::undefined_value());
4765 }
4766
Leon Clarkee46be812010-01-19 14:06:41 +00004767 // Push the receiver.
4768 frame_->PushParameterAt(-1);
4769
Steve Blocka7e24c12009-10-30 11:49:00 +00004770 // Resolve the call.
4771 Result result =
Leon Clarkee46be812010-01-19 14:06:41 +00004772 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00004773
Leon Clarkee46be812010-01-19 14:06:41 +00004774 // The runtime call returns a pair of values in eax (function) and
4775 // edx (receiver). Touch up the stack with the right values.
4776 Result receiver = allocator_->Allocate(edx);
4777 frame_->SetElementAt(arg_count + 1, &result);
4778 frame_->SetElementAt(arg_count, &receiver);
4779 receiver.Unuse();
Steve Blocka7e24c12009-10-30 11:49:00 +00004780
4781 // Call the function.
4782 CodeForSourcePosition(node->position());
4783 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00004784 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004785 result = frame_->CallStub(&call_function, arg_count + 1);
4786
4787 // Restore the context and overwrite the function on the stack with
4788 // the result.
4789 frame_->RestoreContextRegister();
4790 frame_->SetElementAt(0, &result);
4791
4792 } else if (var != NULL && !var->is_this() && var->is_global()) {
4793 // ----------------------------------
4794 // JavaScript example: 'foo(1, 2, 3)' // foo is global
4795 // ----------------------------------
4796
Steve Blocka7e24c12009-10-30 11:49:00 +00004797 // Pass the global object as the receiver and let the IC stub
4798 // patch the stack to use the global proxy as 'this' in the
4799 // invoked function.
4800 LoadGlobal();
4801
4802 // Load the arguments.
4803 int arg_count = args->length();
4804 for (int i = 0; i < arg_count; i++) {
4805 Load(args->at(i));
4806 }
4807
Leon Clarkee46be812010-01-19 14:06:41 +00004808 // Push the name of the function onto the frame.
4809 frame_->Push(var->name());
4810
Steve Blocka7e24c12009-10-30 11:49:00 +00004811 // Call the IC initialization code.
4812 CodeForSourcePosition(node->position());
4813 Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
4814 arg_count,
4815 loop_nesting());
4816 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00004817 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004818
4819 } else if (var != NULL && var->slot() != NULL &&
4820 var->slot()->type() == Slot::LOOKUP) {
4821 // ----------------------------------
4822 // JavaScript example: 'with (obj) foo(1, 2, 3)' // foo is in obj
4823 // ----------------------------------
4824
4825 // Load the function from the context. Sync the frame so we can
4826 // push the arguments directly into place.
4827 frame_->SyncRange(0, frame_->element_count() - 1);
4828 frame_->EmitPush(esi);
4829 frame_->EmitPush(Immediate(var->name()));
4830 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
4831 // The runtime call returns a pair of values in eax and edx. The
4832 // looked-up function is in eax and the receiver is in edx. These
4833 // register references are not ref counted here. We spill them
4834 // eagerly since they are arguments to an inevitable call (and are
4835 // not sharable by the arguments).
4836 ASSERT(!allocator()->is_used(eax));
4837 frame_->EmitPush(eax);
4838
4839 // Load the receiver.
4840 ASSERT(!allocator()->is_used(edx));
4841 frame_->EmitPush(edx);
4842
4843 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004844 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004845
4846 } else if (property != NULL) {
4847 // Check if the key is a literal string.
4848 Literal* literal = property->key()->AsLiteral();
4849
4850 if (literal != NULL && literal->handle()->IsSymbol()) {
4851 // ------------------------------------------------------------------
4852 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
4853 // ------------------------------------------------------------------
4854
4855 Handle<String> name = Handle<String>::cast(literal->handle());
4856
4857 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
4858 name->IsEqualTo(CStrVector("apply")) &&
4859 args->length() == 2 &&
4860 args->at(1)->AsVariableProxy() != NULL &&
4861 args->at(1)->AsVariableProxy()->IsArguments()) {
4862 // Use the optimized Function.prototype.apply that avoids
4863 // allocating lazily allocated arguments objects.
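// JavaScript example (hypothetical):
// 'function f() { return obj.method.apply(this, arguments); }'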
Leon Clarked91b9f72010-01-27 17:25:45 +00004864 CallApplyLazy(property->obj(),
Steve Blocka7e24c12009-10-30 11:49:00 +00004865 args->at(0),
4866 args->at(1)->AsVariableProxy(),
4867 node->position());
4868
4869 } else {
Leon Clarkee46be812010-01-19 14:06:41 +00004870 // Push the receiver onto the frame.
Steve Blocka7e24c12009-10-30 11:49:00 +00004871 Load(property->obj());
4872
4873 // Load the arguments.
4874 int arg_count = args->length();
4875 for (int i = 0; i < arg_count; i++) {
4876 Load(args->at(i));
4877 }
4878
Leon Clarkee46be812010-01-19 14:06:41 +00004879 // Push the name of the function onto the frame.
4880 frame_->Push(name);
4881
Steve Blocka7e24c12009-10-30 11:49:00 +00004882 // Call the IC initialization code.
4883 CodeForSourcePosition(node->position());
4884 Result result =
4885 frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
4886 loop_nesting());
4887 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00004888 frame_->Push(&result);
Steve Blocka7e24c12009-10-30 11:49:00 +00004889 }
4890
4891 } else {
4892 // -------------------------------------------
4893 // JavaScript example: 'array[index](1, 2, 3)'
4894 // -------------------------------------------
4895
4896 // Load the function to call from the property through a reference.
Steve Blocka7e24c12009-10-30 11:49:00 +00004897
4898 // Pass receiver to called function.
4899 if (property->is_synthetic()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00004900 Reference ref(this, property);
4901 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00004902 // Use global object as receiver.
4903 LoadGlobalReceiver();
4904 } else {
Leon Clarked91b9f72010-01-27 17:25:45 +00004905 Load(property->obj());
4906 Load(property->key());
4907 Result function = EmitKeyedLoad(false);
4908 frame_->Drop(); // Key.
4909 Result receiver = frame_->Pop();
4910 frame_->Push(&function);
4911 frame_->Push(&receiver);
Steve Blocka7e24c12009-10-30 11:49:00 +00004912 }
4913
4914 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004915 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004916 }
4917
4918 } else {
4919 // ----------------------------------
4920 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
4921 // ----------------------------------
4922
4923 // Load the function.
4924 Load(function);
4925
4926 // Pass the global proxy as the receiver.
4927 LoadGlobalReceiver();
4928
4929 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004930 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004931 }
4932}
4933
4934
4935void CodeGenerator::VisitCallNew(CallNew* node) {
4936 Comment cmnt(masm_, "[ CallNew");
4937
4938 // According to ECMA-262, section 11.2.2, page 44, the function
4939 // expression in new calls must be evaluated before the
4940 // arguments. This is different from ordinary calls, where the
4941 // actual function to call is resolved after the arguments have been
4942 // evaluated.
4943
4944 // Compute function to call and use the global object as the
4945 // receiver. There is no need to use the global proxy here because
4946 // it will always be replaced with a newly allocated object.
4947 Load(node->expression());
4948 LoadGlobal();
4949
4950 // Push the arguments ("left-to-right") on the stack.
4951 ZoneList<Expression*>* args = node->arguments();
4952 int arg_count = args->length();
4953 for (int i = 0; i < arg_count; i++) {
4954 Load(args->at(i));
4955 }
4956
4957 // Call the construct call builtin that handles allocation and
4958 // constructor invocation.
4959 CodeForSourcePosition(node->position());
4960 Result result = frame_->CallConstructor(arg_count);
4961 // Replace the function on the stack with the result.
4962 frame_->SetElementAt(0, &result);
4963}
4964
4965
4966void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
4967 ASSERT(args->length() == 1);
4968 Load(args->at(0));
4969 Result value = frame_->Pop();
4970 value.ToRegister();
4971 ASSERT(value.is_valid());
4972 __ test(value.reg(), Immediate(kSmiTagMask));
4973 value.Unuse();
4974 destination()->Split(zero);
4975}
4976
4977
4978void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
4979 // Conditionally generate a log call.
4980 // Args:
4981 // 0 (literal string): The type of logging (corresponds to the flags).
4982 // This is used to determine whether or not to generate the log call.
4983 // 1 (string): Format string. Access the string at argument index 2
4984 // with '%2s' (see Logger::LogRuntime for all the formats).
4985 // 2 (array): Arguments to the format string.
4986 ASSERT_EQ(args->length(), 3);
4987#ifdef ENABLE_LOGGING_AND_PROFILING
4988 if (ShouldGenerateLog(args->at(0))) {
4989 Load(args->at(1));
4990 Load(args->at(2));
4991 frame_->CallRuntime(Runtime::kLog, 2);
4992 }
4993#endif
4994 // Finally, we're expected to leave a value on the top of the stack.
4995 frame_->Push(Factory::undefined_value());
4996}
4997
4998
4999void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
5000 ASSERT(args->length() == 1);
5001 Load(args->at(0));
5002 Result value = frame_->Pop();
5003 value.ToRegister();
5004 ASSERT(value.is_valid());
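// A value is a non-negative smi iff both its smi tag bit and its sign bit
// are clear, so a single test against (kSmiTagMask | 0x80000000) covers both
// checks. E.g. the smi -2 is 0xFFFFFFFC, whose sign bit makes the test fail.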
5005 __ test(value.reg(), Immediate(kSmiTagMask | 0x80000000));
5006 value.Unuse();
5007 destination()->Split(zero);
5008}
5009
5010
5011// This generates code that performs a charCodeAt() call or returns
5012// undefined in order to trigger the slow case, Runtime_StringCharCodeAt.
Steve Blockd0582a62009-12-15 09:54:21 +00005013// It can handle flat strings with 8- or 16-bit characters, and cons strings
5014// where the answer is found in the left-hand branch of the cons. The slow
5015// case will flatten the string, which ensures that the answer is in the
5016// left-hand side the next time around.
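// For instance, a smi or non-string receiver, an index that is negative,
// not a smi, or out of range, or a string that is neither sequential nor a
// cons with an empty second part all make this code produce undefined and
// defer to the runtime.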
Steve Blocka7e24c12009-10-30 11:49:00 +00005017void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
5018 Comment cmnt(masm_, "[ GenerateFastCharCodeAt");
5019 ASSERT(args->length() == 2);
5020
5021 Label slow_case;
5022 Label end;
5023 Label not_a_flat_string;
Steve Blocka7e24c12009-10-30 11:49:00 +00005024 Label try_again_with_new_string;
5025 Label ascii_string;
5026 Label got_char_code;
5027
5028 Load(args->at(0));
5029 Load(args->at(1));
5030 Result index = frame_->Pop();
5031 Result object = frame_->Pop();
5032
5033 // Get register ecx to use as shift amount later.
5034 Result shift_amount;
5035 if (object.is_register() && object.reg().is(ecx)) {
5036 Result fresh = allocator_->Allocate();
5037 shift_amount = object;
5038 object = fresh;
5039 __ mov(object.reg(), ecx);
5040 }
5041 if (index.is_register() && index.reg().is(ecx)) {
5042 Result fresh = allocator_->Allocate();
5043 shift_amount = index;
5044 index = fresh;
5045 __ mov(index.reg(), ecx);
5046 }
5047 // There could be references to ecx in the frame. Allocating will
5048 // spill them, otherwise spill explicitly.
5049 if (shift_amount.is_valid()) {
5050 frame_->Spill(ecx);
5051 } else {
5052 shift_amount = allocator()->Allocate(ecx);
5053 }
5054 ASSERT(shift_amount.is_register());
5055 ASSERT(shift_amount.reg().is(ecx));
5056 ASSERT(allocator_->count(ecx) == 1);
5057
5058 // We will mutate the index register and possibly the object register.
5059 // The case where they are somehow the same register is handled
5060 // because we only mutate them in the case where the receiver is a
5061 // heap object and the index is not.
5062 object.ToRegister();
5063 index.ToRegister();
5064 frame_->Spill(object.reg());
5065 frame_->Spill(index.reg());
5066
5067 // We need a single extra temporary register.
5068 Result temp = allocator()->Allocate();
5069 ASSERT(temp.is_valid());
5070
5071 // There is no virtual frame effect from here up to the final result
5072 // push.
5073
5074 // If the receiver is a smi trigger the slow case.
5075 ASSERT(kSmiTag == 0);
5076 __ test(object.reg(), Immediate(kSmiTagMask));
5077 __ j(zero, &slow_case);
5078
5079 // If the index is negative or non-smi trigger the slow case.
5080 ASSERT(kSmiTag == 0);
5081 __ test(index.reg(), Immediate(kSmiTagMask | 0x80000000));
5082 __ j(not_zero, &slow_case);
5083 // Untag the index.
Leon Clarkee46be812010-01-19 14:06:41 +00005084 __ SmiUntag(index.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00005085
5086 __ bind(&try_again_with_new_string);
5087 // Fetch the instance type of the receiver into ecx.
5088 __ mov(ecx, FieldOperand(object.reg(), HeapObject::kMapOffset));
5089 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
5090 // If the receiver is not a string trigger the slow case.
5091 __ test(ecx, Immediate(kIsNotStringMask));
5092 __ j(not_zero, &slow_case);
5093
Steve Blocka7e24c12009-10-30 11:49:00 +00005094 // Fetch the length field into the temporary register.
5095 __ mov(temp.reg(), FieldOperand(object.reg(), String::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005096 // Check for index out of range.
5097 __ cmp(index.reg(), Operand(temp.reg()));
5098 __ j(greater_equal, &slow_case);
5099 // Reload the instance type (into the temp register this time).
5100 __ mov(temp.reg(), FieldOperand(object.reg(), HeapObject::kMapOffset));
5101 __ movzx_b(temp.reg(), FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
5102
5103 // We need special handling for non-flat strings.
5104 ASSERT(kSeqStringTag == 0);
5105 __ test(temp.reg(), Immediate(kStringRepresentationMask));
5106 __ j(not_zero, &not_a_flat_string);
5107 // Check for 1-byte or 2-byte string.
5108 __ test(temp.reg(), Immediate(kStringEncodingMask));
5109 __ j(not_zero, &ascii_string);
5110
5111 // 2-byte string.
5112 // Load the 2-byte character code into the temp register.
5113 __ movzx_w(temp.reg(), FieldOperand(object.reg(),
5114 index.reg(),
5115 times_2,
5116 SeqTwoByteString::kHeaderSize));
5117 __ jmp(&got_char_code);
5118
5119 // ASCII string.
5120 __ bind(&ascii_string);
5121 // Load the byte into the temp register.
5122 __ movzx_b(temp.reg(), FieldOperand(object.reg(),
5123 index.reg(),
5124 times_1,
5125 SeqAsciiString::kHeaderSize));
5126 __ bind(&got_char_code);
Leon Clarkee46be812010-01-19 14:06:41 +00005127 __ SmiTag(temp.reg());
Steve Blocka7e24c12009-10-30 11:49:00 +00005128 __ jmp(&end);
5129
5130 // Handle non-flat strings.
5131 __ bind(&not_a_flat_string);
5132 __ and_(temp.reg(), kStringRepresentationMask);
5133 __ cmp(temp.reg(), kConsStringTag);
Steve Blocka7e24c12009-10-30 11:49:00 +00005134 __ j(not_equal, &slow_case);
5135
Steve Blocka7e24c12009-10-30 11:49:00 +00005136 // ConsString.
Steve Blockd0582a62009-12-15 09:54:21 +00005137 // Check that the right hand side is the empty string (i.e. if this is really a
5138 // flat string in a cons string). If that is not the case we would rather go
5139 // to the runtime system now, to flatten the string.
5140 __ mov(temp.reg(), FieldOperand(object.reg(), ConsString::kSecondOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00005141 __ cmp(Operand(temp.reg()), Factory::empty_string());
Steve Blockd0582a62009-12-15 09:54:21 +00005142 __ j(not_equal, &slow_case);
5143 // Get the first of the two strings.
Steve Blocka7e24c12009-10-30 11:49:00 +00005144 __ mov(object.reg(), FieldOperand(object.reg(), ConsString::kFirstOffset));
5145 __ jmp(&try_again_with_new_string);
5146
5147 __ bind(&slow_case);
5148 // Move the undefined value into the result register, which will
5149 // trigger the slow case.
5150 __ Set(temp.reg(), Immediate(Factory::undefined_value()));
5151
5152 __ bind(&end);
5153 frame_->Push(&temp);
5154}
5155
5156
5157void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
5158 ASSERT(args->length() == 1);
5159 Load(args->at(0));
5160 Result value = frame_->Pop();
5161 value.ToRegister();
5162 ASSERT(value.is_valid());
5163 __ test(value.reg(), Immediate(kSmiTagMask));
5164 destination()->false_target()->Branch(equal);
5165 // It is a heap object - get map.
5166 Result temp = allocator()->Allocate();
5167 ASSERT(temp.is_valid());
5168 // Check if the object is a JS array or not.
5169 __ CmpObjectType(value.reg(), JS_ARRAY_TYPE, temp.reg());
5170 value.Unuse();
5171 temp.Unuse();
5172 destination()->Split(equal);
5173}
5174
5175
Steve Blockd0582a62009-12-15 09:54:21 +00005176void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
5177 // This generates a fast version of:
5178 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
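// E.g. null and plain objects such as {} answer true; smis, undetectable
// objects, and functions answer false.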
5179 ASSERT(args->length() == 1);
5180 Load(args->at(0));
5181 Result obj = frame_->Pop();
5182 obj.ToRegister();
5183
5184 __ test(obj.reg(), Immediate(kSmiTagMask));
5185 destination()->false_target()->Branch(zero);
5186 __ cmp(obj.reg(), Factory::null_value());
5187 destination()->true_target()->Branch(equal);
5188
5189 Result map = allocator()->Allocate();
5190 ASSERT(map.is_valid());
5191 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
5192 // Undetectable objects behave like undefined when tested with typeof.
5193 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset));
5194 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable));
5195 destination()->false_target()->Branch(not_zero);
5196 __ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
5197 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
5198 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
5199 destination()->false_target()->Branch(less);
5200 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
5201 obj.Unuse();
5202 map.Unuse();
5203 destination()->Split(less_equal);
5204}
5205
5206
5207void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
5208 // This generates a fast version of:
5209 // (%_ClassOf(arg) === 'Function')
5210 ASSERT(args->length() == 1);
5211 Load(args->at(0));
5212 Result obj = frame_->Pop();
5213 obj.ToRegister();
5214 __ test(obj.reg(), Immediate(kSmiTagMask));
5215 destination()->false_target()->Branch(zero);
5216 Result temp = allocator()->Allocate();
5217 ASSERT(temp.is_valid());
5218 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, temp.reg());
5219 obj.Unuse();
5220 temp.Unuse();
5221 destination()->Split(equal);
5222}
5223
5224
Leon Clarked91b9f72010-01-27 17:25:45 +00005225void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
5226 ASSERT(args->length() == 1);
5227 Load(args->at(0));
5228 Result obj = frame_->Pop();
5229 obj.ToRegister();
5230 __ test(obj.reg(), Immediate(kSmiTagMask));
5231 destination()->false_target()->Branch(zero);
5232 Result temp = allocator()->Allocate();
5233 ASSERT(temp.is_valid());
5234 __ mov(temp.reg(),
5235 FieldOperand(obj.reg(), HeapObject::kMapOffset));
5236 __ movzx_b(temp.reg(),
5237 FieldOperand(temp.reg(), Map::kBitFieldOffset));
5238 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
5239 obj.Unuse();
5240 temp.Unuse();
5241 destination()->Split(not_zero);
5242}
5243
5244
Steve Blocka7e24c12009-10-30 11:49:00 +00005245void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
5246 ASSERT(args->length() == 0);
5247
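// Illustration (hypothetical code): inside
// 'function F() { return %_IsConstructCall(); }'
// 'new F()' yields true and a plain call 'F()' yields false, based on the
// CONSTRUCT marker of the calling frame checked below.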
5248 // Get the frame pointer for the calling frame.
5249 Result fp = allocator()->Allocate();
5250 __ mov(fp.reg(), Operand(ebp, StandardFrameConstants::kCallerFPOffset));
5251
5252 // Skip the arguments adaptor frame if it exists.
5253 Label check_frame_marker;
5254 __ cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
5255 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5256 __ j(not_equal, &check_frame_marker);
5257 __ mov(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
5258
5259 // Check the marker in the calling frame.
5260 __ bind(&check_frame_marker);
5261 __ cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
5262 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
5263 fp.Unuse();
5264 destination()->Split(equal);
5265}
5266
5267
5268void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
5269 ASSERT(args->length() == 0);
5270 // ArgumentsAccessStub takes the parameter count as an input argument
5271 // in register eax. Create a constant result for it.
Andrei Popescu31002712010-02-23 13:46:05 +00005272 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00005273 // Call the shared stub to get to the arguments.length.
5274 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_LENGTH);
5275 Result result = frame_->CallStub(&stub, &count);
5276 frame_->Push(&result);
5277}
5278
5279
5280void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
5281 ASSERT(args->length() == 1);
5282 JumpTarget leave, null, function, non_function_constructor;
5283 Load(args->at(0)); // Load the object.
5284 Result obj = frame_->Pop();
5285 obj.ToRegister();
5286 frame_->Spill(obj.reg());
5287
5288 // If the object is a smi, we return null.
5289 __ test(obj.reg(), Immediate(kSmiTagMask));
5290 null.Branch(zero);
5291
5292 // Check that the object is a JS object but take special care of JS
5293 // functions to make sure they have 'Function' as their class.
5294 { Result tmp = allocator()->Allocate();
5295 __ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
5296 __ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset));
5297 __ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE);
5298 null.Branch(less);
5299
5300 // As long as JS_FUNCTION_TYPE is the last instance type and it is
5301 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
5302 // LAST_JS_OBJECT_TYPE.
5303 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
5304 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
5305 __ cmp(tmp.reg(), JS_FUNCTION_TYPE);
5306 function.Branch(equal);
5307 }
5308
5309 // Check if the constructor in the map is a function.
5310 { Result tmp = allocator()->Allocate();
5311 __ mov(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset));
5312 __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg());
5313 non_function_constructor.Branch(not_equal);
5314 }
5315
5316 // The map register now contains the constructor function. Grab the
5317 // instance class name from there.
5318 __ mov(obj.reg(),
5319 FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
5320 __ mov(obj.reg(),
5321 FieldOperand(obj.reg(), SharedFunctionInfo::kInstanceClassNameOffset));
5322 frame_->Push(&obj);
5323 leave.Jump();
5324
5325 // Functions have class 'Function'.
5326 function.Bind();
5327 frame_->Push(Factory::function_class_symbol());
5328 leave.Jump();
5329
5330 // Objects with a non-function constructor have class 'Object'.
5331 non_function_constructor.Bind();
5332 frame_->Push(Factory::Object_symbol());
5333 leave.Jump();
5334
5335 // Non-JS objects have class null.
5336 null.Bind();
5337 frame_->Push(Factory::null_value());
5338
5339 // All done.
5340 leave.Bind();
5341}
5342
5343
5344void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
5345 ASSERT(args->length() == 1);
5346 JumpTarget leave;
5347 Load(args->at(0)); // Load the object.
5348 frame_->Dup();
5349 Result object = frame_->Pop();
5350 object.ToRegister();
5351 ASSERT(object.is_valid());
5352 // if (object->IsSmi()) return object.
5353 __ test(object.reg(), Immediate(kSmiTagMask));
5354 leave.Branch(zero, taken);
5355 // It is a heap object - get map.
5356 Result temp = allocator()->Allocate();
5357 ASSERT(temp.is_valid());
5358 // if (!object->IsJSValue()) return object.
5359 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, temp.reg());
5360 leave.Branch(not_equal, not_taken);
5361 __ mov(temp.reg(), FieldOperand(object.reg(), JSValue::kValueOffset));
5362 object.Unuse();
5363 frame_->SetElementAt(0, &temp);
5364 leave.Bind();
5365}
5366
5367
5368void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
5369 ASSERT(args->length() == 2);
5370 JumpTarget leave;
5371 Load(args->at(0)); // Load the object.
5372 Load(args->at(1)); // Load the value.
5373 Result value = frame_->Pop();
5374 Result object = frame_->Pop();
5375 value.ToRegister();
5376 object.ToRegister();
5377
5378 // if (object->IsSmi()) return value.
5379 __ test(object.reg(), Immediate(kSmiTagMask));
5380 leave.Branch(zero, &value, taken);
5381
5382 // It is a heap object - get its map.
5383 Result scratch = allocator_->Allocate();
5384 ASSERT(scratch.is_valid());
5385 // if (!object->IsJSValue()) return value.
5386 __ CmpObjectType(object.reg(), JS_VALUE_TYPE, scratch.reg());
5387 leave.Branch(not_equal, &value, not_taken);
5388
5389 // Store the value.
5390 __ mov(FieldOperand(object.reg(), JSValue::kValueOffset), value.reg());
5391 // Update the write barrier. Save the value as it will be
5392 // overwritten by the write barrier code and is needed afterward.
5393 Result duplicate_value = allocator_->Allocate();
5394 ASSERT(duplicate_value.is_valid());
5395 __ mov(duplicate_value.reg(), value.reg());
5396 // The object register is also overwritten by the write barrier and
5397 // possibly aliased in the frame.
5398 frame_->Spill(object.reg());
5399 __ RecordWrite(object.reg(), JSValue::kValueOffset, duplicate_value.reg(),
5400 scratch.reg());
5401 object.Unuse();
5402 scratch.Unuse();
5403 duplicate_value.Unuse();
5404
5405 // Leave.
5406 leave.Bind(&value);
5407 frame_->Push(&value);
5408}
5409
5410
5411void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
5412 ASSERT(args->length() == 1);
5413
5414 // ArgumentsAccessStub expects the key in edx and the formal
5415 // parameter count in eax.
5416 Load(args->at(0));
5417 Result key = frame_->Pop();
5418 // Explicitly create a constant result.
Andrei Popescu31002712010-02-23 13:46:05 +00005419 Result count(Handle<Smi>(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00005420 // Call the shared stub to get to arguments[key].
5421 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
5422 Result result = frame_->CallStub(&stub, &key, &count);
5423 frame_->Push(&result);
5424}
5425
5426
5427void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
5428 ASSERT(args->length() == 2);
5429
5430 // Load the two objects into registers and perform the comparison.
5431 Load(args->at(0));
5432 Load(args->at(1));
5433 Result right = frame_->Pop();
5434 Result left = frame_->Pop();
5435 right.ToRegister();
5436 left.ToRegister();
5437 __ cmp(right.reg(), Operand(left.reg()));
5438 right.Unuse();
5439 left.Unuse();
5440 destination()->Split(equal);
5441}
5442
5443
5444void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
5445 ASSERT(args->length() == 0);
5446 ASSERT(kSmiTag == 0); // EBP value is aligned, so it should look like a Smi.
5447 Result ebp_as_smi = allocator_->Allocate();
5448 ASSERT(ebp_as_smi.is_valid());
5449 __ mov(ebp_as_smi.reg(), Operand(ebp));
5450 frame_->Push(&ebp_as_smi);
5451}
5452
5453
5454void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) {
5455 ASSERT(args->length() == 0);
5456 frame_->SpillAll();
5457
5458 // Make sure the frame is aligned like the OS expects.
5459 static const int kFrameAlignment = OS::ActivationFrameAlignment();
5460 if (kFrameAlignment > 0) {
5461 ASSERT(IsPowerOf2(kFrameAlignment));
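// E.g. with 16-byte alignment, 'and esp, -16' clears the low four bits of
// esp, rounding it down to the next 16-byte boundary; the original esp is
// saved in edi and restored after the call.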
5462 __ mov(edi, Operand(esp)); // Save in callee-saved register.
5463 __ and_(esp, -kFrameAlignment);
5464 }
5465
5466 // Call V8::RandomPositiveSmi().
5467 __ call(FUNCTION_ADDR(V8::RandomPositiveSmi), RelocInfo::RUNTIME_ENTRY);
5468
5469 // Restore stack pointer from callee-saved register edi.
5470 if (kFrameAlignment > 0) {
5471 __ mov(esp, Operand(edi));
5472 }
5473
5474 Result result = allocator_->Allocate(eax);
5475 frame_->Push(&result);
5476}
5477
5478
Steve Blockd0582a62009-12-15 09:54:21 +00005479void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
5480 ASSERT_EQ(2, args->length());
5481
5482 Load(args->at(0));
5483 Load(args->at(1));
5484
5485 StringAddStub stub(NO_STRING_ADD_FLAGS);
5486 Result answer = frame_->CallStub(&stub, 2);
5487 frame_->Push(&answer);
5488}
5489
5490
Leon Clarkee46be812010-01-19 14:06:41 +00005491void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
5492 ASSERT_EQ(3, args->length());
5493
5494 Load(args->at(0));
5495 Load(args->at(1));
5496 Load(args->at(2));
5497
5498 SubStringStub stub;
5499 Result answer = frame_->CallStub(&stub, 3);
5500 frame_->Push(&answer);
5501}
5502
5503
5504void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
5505 ASSERT_EQ(2, args->length());
5506
5507 Load(args->at(0));
5508 Load(args->at(1));
5509
5510 StringCompareStub stub;
5511 Result answer = frame_->CallStub(&stub, 2);
5512 frame_->Push(&answer);
5513}
5514
5515
5516void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
5517 ASSERT_EQ(args->length(), 4);
5518
5519 // Load the arguments on the stack and call the stub.
5520 Load(args->at(0));
5521 Load(args->at(1));
5522 Load(args->at(2));
5523 Load(args->at(3));
5524 RegExpExecStub stub;
5525 Result result = frame_->CallStub(&stub, 4);
5526 frame_->Push(&result);
5527}
5528
5529
Steve Blocka7e24c12009-10-30 11:49:00 +00005530void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
5531 if (CheckForInlineRuntimeCall(node)) {
5532 return;
5533 }
5534
5535 ZoneList<Expression*>* args = node->arguments();
5536 Comment cmnt(masm_, "[ CallRuntime");
5537 Runtime::Function* function = node->function();
5538
5539 if (function == NULL) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005540 // Push the builtins object found in the current global object.
5541 Result temp = allocator()->Allocate();
5542 ASSERT(temp.is_valid());
5543 __ mov(temp.reg(), GlobalObject());
5544 __ mov(temp.reg(), FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
5545 frame_->Push(&temp);
5546 }
5547
5548 // Push the arguments ("left-to-right").
5549 int arg_count = args->length();
5550 for (int i = 0; i < arg_count; i++) {
5551 Load(args->at(i));
5552 }
5553
5554 if (function == NULL) {
5555 // Call the JS runtime function.
Leon Clarkee46be812010-01-19 14:06:41 +00005556 frame_->Push(node->name());
Steve Blocka7e24c12009-10-30 11:49:00 +00005557 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
5558 arg_count,
5559 loop_nesting_);
5560 frame_->RestoreContextRegister();
Leon Clarkee46be812010-01-19 14:06:41 +00005561 frame_->Push(&answer);
Steve Blocka7e24c12009-10-30 11:49:00 +00005562 } else {
5563 // Call the C runtime function.
5564 Result answer = frame_->CallRuntime(function, arg_count);
5565 frame_->Push(&answer);
5566 }
5567}
5568
5569
5570void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005571 Comment cmnt(masm_, "[ UnaryOperation");
5572
5573 Token::Value op = node->op();
5574
5575 if (op == Token::NOT) {
5576 // Swap the true and false targets but keep the same actual label
5577 // as the fall through.
5578 destination()->Invert();
Steve Blockd0582a62009-12-15 09:54:21 +00005579 LoadCondition(node->expression(), destination(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00005580 // Swap the labels back.
5581 destination()->Invert();
5582
5583 } else if (op == Token::DELETE) {
5584 Property* property = node->expression()->AsProperty();
5585 if (property != NULL) {
5586 Load(property->obj());
5587 Load(property->key());
5588 Result answer = frame_->InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, 2);
5589 frame_->Push(&answer);
5590 return;
5591 }
5592
5593 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
5594 if (variable != NULL) {
5595 Slot* slot = variable->slot();
5596 if (variable->is_global()) {
5597 LoadGlobal();
5598 frame_->Push(variable->name());
5599 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
5600 CALL_FUNCTION, 2);
5601 frame_->Push(&answer);
5602 return;
5603
5604 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
5605 // Call the runtime to look up the context holding the named
5606 // variable. Sync the virtual frame eagerly so we can push the
5607 // arguments directly into place.
5608 frame_->SyncRange(0, frame_->element_count() - 1);
5609 frame_->EmitPush(esi);
5610 frame_->EmitPush(Immediate(variable->name()));
5611 Result context = frame_->CallRuntime(Runtime::kLookupContext, 2);
5612 ASSERT(context.is_register());
5613 frame_->EmitPush(context.reg());
5614 context.Unuse();
5615 frame_->EmitPush(Immediate(variable->name()));
5616 Result answer = frame_->InvokeBuiltin(Builtins::DELETE,
5617 CALL_FUNCTION, 2);
5618 frame_->Push(&answer);
5619 return;
5620 }
5621
5622 // Default: Result of deleting non-global, not dynamically
5623 // introduced variables is false.
5624 frame_->Push(Factory::false_value());
5625
5626 } else {
5627 // Default: Result of deleting expressions is true.
5628 Load(node->expression()); // may have side-effects
5629 frame_->SetElementAt(0, Factory::true_value());
5630 }
5631
5632 } else if (op == Token::TYPEOF) {
5633 // Special case for loading the typeof expression; see comment on
5634 // LoadTypeofExpression().
5635 LoadTypeofExpression(node->expression());
5636 Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
5637 frame_->Push(&answer);
5638
5639 } else if (op == Token::VOID) {
5640 Expression* expression = node->expression();
5641 if (expression && expression->AsLiteral() && (
5642 expression->AsLiteral()->IsTrue() ||
5643 expression->AsLiteral()->IsFalse() ||
5644 expression->AsLiteral()->handle()->IsNumber() ||
5645 expression->AsLiteral()->handle()->IsString() ||
5646 expression->AsLiteral()->handle()->IsJSRegExp() ||
5647 expression->AsLiteral()->IsNull())) {
5648 // Omit evaluating the value of the primitive literal.
5649 // It will be discarded anyway, and can have no side effect.
5650 frame_->Push(Factory::undefined_value());
5651 } else {
5652 Load(node->expression());
5653 frame_->SetElementAt(0, Factory::undefined_value());
5654 }
5655
5656 } else {
5657 Load(node->expression());
Leon Clarkee46be812010-01-19 14:06:41 +00005658 bool overwrite =
5659 (node->expression()->AsBinaryOperation() != NULL &&
5660 node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
Steve Blocka7e24c12009-10-30 11:49:00 +00005661 switch (op) {
5662 case Token::SUB: {
Leon Clarkee46be812010-01-19 14:06:41 +00005663 GenericUnaryOpStub stub(Token::SUB, overwrite);
Steve Blocka7e24c12009-10-30 11:49:00 +00005664 // TODO(1222589): remove dependency of TOS being cached inside stub
5665 Result operand = frame_->Pop();
5666 Result answer = frame_->CallStub(&stub, &operand);
5667 frame_->Push(&answer);
5668 break;
5669 }
5670
5671 case Token::BIT_NOT: {
5672 // Smi check.
5673 JumpTarget smi_label;
5674 JumpTarget continue_label;
5675 Result operand = frame_->Pop();
5676 operand.ToRegister();
5677 __ test(operand.reg(), Immediate(kSmiTagMask));
5678 smi_label.Branch(zero, &operand, taken);
5679
Leon Clarkee46be812010-01-19 14:06:41 +00005680 GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
5681 Result answer = frame_->CallStub(&stub, &operand);
Steve Blocka7e24c12009-10-30 11:49:00 +00005682 continue_label.Jump(&answer);
Leon Clarkee46be812010-01-19 14:06:41 +00005683
Steve Blocka7e24c12009-10-30 11:49:00 +00005684 smi_label.Bind(&answer);
5685 answer.ToRegister();
5686 frame_->Spill(answer.reg());
5687 __ not_(answer.reg());
5688 __ and_(answer.reg(), ~kSmiTagMask); // Remove inverted smi-tag.
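// Worked example (hypothetical value): for ~5 the operand is Smi(5), i.e.
// 0x0000000A; not_ gives 0xFFFFFFF5 and clearing the inverted tag bit gives
// 0xFFFFFFF4, which is Smi(-6), the expected result of ~5.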
Leon Clarkee46be812010-01-19 14:06:41 +00005689
Steve Blocka7e24c12009-10-30 11:49:00 +00005690 continue_label.Bind(&answer);
5691 frame_->Push(&answer);
5692 break;
5693 }
5694
5695 case Token::ADD: {
5696 // Smi check.
5697 JumpTarget continue_label;
5698 Result operand = frame_->Pop();
5699 operand.ToRegister();
5700 __ test(operand.reg(), Immediate(kSmiTagMask));
5701 continue_label.Branch(zero, &operand, taken);
5702
5703 frame_->Push(&operand);
5704 Result answer = frame_->InvokeBuiltin(Builtins::TO_NUMBER,
5705 CALL_FUNCTION, 1);
5706
5707 continue_label.Bind(&answer);
5708 frame_->Push(&answer);
5709 break;
5710 }
5711
5712 default:
5713 // NOT, DELETE, TYPEOF, and VOID are handled outside the
5714 // switch.
5715 UNREACHABLE();
5716 }
5717 }
5718}
5719
5720
5721// The value in dst was optimistically incremented or decremented. The
5722// result overflowed or was not smi tagged. Undo the operation, call
5723// into the runtime to convert the argument to a number, and call the
5724// specialized add or subtract stub. The result is left in dst.
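// For example (hypothetical value): if dst held the maximum smi 2^30 - 1
// (tagged 0x7FFFFFFE), the optimistic add of Smi(1) overflows to 0x80000000,
// and this deferred path undoes the add before calling the runtime.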
5725class DeferredPrefixCountOperation: public DeferredCode {
5726 public:
5727 DeferredPrefixCountOperation(Register dst, bool is_increment)
5728 : dst_(dst), is_increment_(is_increment) {
5729 set_comment("[ DeferredCountOperation");
5730 }
5731
5732 virtual void Generate();
5733
5734 private:
5735 Register dst_;
5736 bool is_increment_;
5737};
5738
5739
5740void DeferredPrefixCountOperation::Generate() {
5741 // Undo the optimistic smi operation.
5742 if (is_increment_) {
5743 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
5744 } else {
5745 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
5746 }
5747 __ push(dst_);
5748 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
5749 __ push(eax);
5750 __ push(Immediate(Smi::FromInt(1)));
5751 if (is_increment_) {
5752 __ CallRuntime(Runtime::kNumberAdd, 2);
5753 } else {
5754 __ CallRuntime(Runtime::kNumberSub, 2);
5755 }
5756 if (!dst_.is(eax)) __ mov(dst_, eax);
5757}
5758
5759
5760// The value in dst was optimistically incremented or decremented. The
5761// result overflowed or was not smi tagged. Undo the operation and call
5762// into the runtime to convert the argument to a number. Update the
5763// original value in old. Call the specialized add or subtract stub.
5764// The result is left in dst.
5765class DeferredPostfixCountOperation: public DeferredCode {
5766 public:
5767 DeferredPostfixCountOperation(Register dst, Register old, bool is_increment)
5768 : dst_(dst), old_(old), is_increment_(is_increment) {
5769 set_comment("[ DeferredCountOperation");
5770 }
5771
5772 virtual void Generate();
5773
5774 private:
5775 Register dst_;
5776 Register old_;
5777 bool is_increment_;
5778};
5779
5780
5781void DeferredPostfixCountOperation::Generate() {
5782 // Undo the optimistic smi operation.
5783 if (is_increment_) {
5784 __ sub(Operand(dst_), Immediate(Smi::FromInt(1)));
5785 } else {
5786 __ add(Operand(dst_), Immediate(Smi::FromInt(1)));
5787 }
5788 __ push(dst_);
5789 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
5790
5791 // Save the result of ToNumber to use as the old value.
5792 __ push(eax);
5793
5794 // Call the runtime for the addition or subtraction.
5795 __ push(eax);
5796 __ push(Immediate(Smi::FromInt(1)));
5797 if (is_increment_) {
5798 __ CallRuntime(Runtime::kNumberAdd, 2);
5799 } else {
5800 __ CallRuntime(Runtime::kNumberSub, 2);
5801 }
5802 if (!dst_.is(eax)) __ mov(dst_, eax);
5803 __ pop(old_);
5804}
5805
5806
5807void CodeGenerator::VisitCountOperation(CountOperation* node) {
5808 Comment cmnt(masm_, "[ CountOperation");
5809
5810 bool is_postfix = node->is_postfix();
5811 bool is_increment = node->op() == Token::INC;
5812
5813 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
5814 bool is_const = (var != NULL && var->mode() == Variable::CONST);
5815
5816 // Postfix operations need a stack slot under the reference to hold
5817 // the old value while the new value is being stored. This is so that
5818 // in the case that storing the new value requires a call, the old
5819 // value will be in the frame to be spilled.
5820 if (is_postfix) frame_->Push(Smi::FromInt(0));
5821
Leon Clarked91b9f72010-01-27 17:25:45 +00005822 // A constant reference is not saved to, so a constant reference is not a
5823 // compound assignment reference.
5824 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00005825 if (target.is_illegal()) {
5826 // Spoof the virtual frame to have the expected height (one higher
5827 // than on entry).
5828 if (!is_postfix) frame_->Push(Smi::FromInt(0));
5829 return;
5830 }
Steve Blockd0582a62009-12-15 09:54:21 +00005831 target.TakeValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00005832
5833 Result new_value = frame_->Pop();
5834 new_value.ToRegister();
5835
5836 Result old_value; // Only allocated in the postfix case.
5837 if (is_postfix) {
5838 // Allocate a temporary to preserve the old value.
5839 old_value = allocator_->Allocate();
5840 ASSERT(old_value.is_valid());
5841 __ mov(old_value.reg(), new_value.reg());
5842 }
5843 // Ensure the new value is writable.
5844 frame_->Spill(new_value.reg());
5845
5846 // In order to combine the overflow and the smi tag check, we need
5847 // to be able to allocate a byte register. We attempt to do so
5848 // without spilling. If we fail, we will generate separate overflow
5849 // and smi tag checks.
5850 //
5851 // We allocate and clear the temporary byte register before
5852 // performing the count operation since clearing the register using
5853 // xor will clear the overflow flag.
5854 Result tmp = allocator_->AllocateByteRegisterWithoutSpilling();
5855 if (tmp.is_valid()) {
5856 __ Set(tmp.reg(), Immediate(0));
5857 }
5858
5859 DeferredCode* deferred = NULL;
5860 if (is_postfix) {
5861 deferred = new DeferredPostfixCountOperation(new_value.reg(),
5862 old_value.reg(),
5863 is_increment);
5864 } else {
5865 deferred = new DeferredPrefixCountOperation(new_value.reg(),
5866 is_increment);
5867 }
5868
5869 if (is_increment) {
5870 __ add(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
5871 } else {
5872 __ sub(Operand(new_value.reg()), Immediate(Smi::FromInt(1)));
5873 }
5874
5875 // If the count operation didn't overflow and the result is a valid
5876 // smi, we're done. Otherwise, we jump to the deferred slow-case
5877 // code.
5878 if (tmp.is_valid()) {
5879 // We combine the overflow and the smi tag check if we could
5880 // successfully allocate a temporary byte register.
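      // setcc stores 1 in the byte register if the operation overflowed
      // and 0 otherwise.  Or-ing in the value and testing the smi tag
      // bit (which is clear for smis) is then nonzero exactly when the
      // operation overflowed or the value was not a smi.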
5881 __ setcc(overflow, tmp.reg());
5882 __ or_(Operand(tmp.reg()), new_value.reg());
5883 __ test(tmp.reg(), Immediate(kSmiTagMask));
5884 tmp.Unuse();
5885 deferred->Branch(not_zero);
5886 } else {
5887 // Otherwise we test separately for overflow and smi tag.
5888 deferred->Branch(overflow);
5889 __ test(new_value.reg(), Immediate(kSmiTagMask));
5890 deferred->Branch(not_zero);
5891 }
5892 deferred->BindExit();
5893
5894 // Postfix: store the old value in the allocated slot under the
5895 // reference.
5896 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
5897
5898 frame_->Push(&new_value);
5899 // Non-constant: update the reference.
5900 if (!is_const) target.SetValue(NOT_CONST_INIT);
5901 }
5902
5903 // Postfix: drop the new value and use the old.
5904 if (is_postfix) frame_->Drop();
5905}
5906
5907
5908void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005909 Comment cmnt(masm_, "[ BinaryOperation");
5910 Token::Value op = node->op();
5911
5912 // According to ECMA-262 section 11.11, page 58, the binary logical
5913 // operators must yield the result of one of the two expressions
5914 // before any ToBoolean() conversions. This means that the value
5915 // produced by a && or || operator is not necessarily a boolean.
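  // For example, '0 || "s"' evaluates to the string "s" and '1 && {}'
  // evaluates to the object literal, not to true or false.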
5916
5917 // NOTE: If the left hand side produces a materialized value (not
5918 // control flow), we force the right hand side to do the same. This
5919 // is necessary because we assume that if we get control flow on the
5920 // last path out of an expression we got it on all paths.
5921 if (op == Token::AND) {
5922 JumpTarget is_true;
5923 ControlDestination dest(&is_true, destination()->false_target(), true);
Steve Blockd0582a62009-12-15 09:54:21 +00005924 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005925
5926 if (dest.false_was_fall_through()) {
5927 // The current false target was used as the fall-through. If
5928 // there are no dangling jumps to is_true then the left
5929 // subexpression was unconditionally false. Otherwise we have
5930 // paths where we do have to evaluate the right subexpression.
5931 if (is_true.is_linked()) {
5932 // We need to compile the right subexpression. If the jump to
5933 // the current false target was a forward jump then we have a
5934 // valid frame, we have just bound the false target, and we
5935 // have to jump around the code for the right subexpression.
5936 if (has_valid_frame()) {
5937 destination()->false_target()->Unuse();
5938 destination()->false_target()->Jump();
5939 }
5940 is_true.Bind();
5941 // The left subexpression compiled to control flow, so the
5942 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00005943 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005944 } else {
5945 // We have actually just jumped to or bound the current false
5946 // target but the current control destination is not marked as
5947 // used.
5948 destination()->Use(false);
5949 }
5950
5951 } else if (dest.is_used()) {
5952 // The left subexpression compiled to control flow (and is_true
5953 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00005954 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005955
5956 } else {
5957 // We have a materialized value on the frame, so we exit with
5958 // one on all paths. There are possibly also jumps to is_true
5959 // from nested subexpressions.
5960 JumpTarget pop_and_continue;
5961 JumpTarget exit;
5962
5963 // Avoid popping the result if it converts to 'false' using the
5964 // standard ToBoolean() conversion as described in ECMA-262,
5965 // section 9.2, page 30.
5966 //
5967 // Duplicate the TOS value. The duplicate will be popped by
5968 // ToBoolean.
5969 frame_->Dup();
5970 ControlDestination dest(&pop_and_continue, &exit, true);
5971 ToBoolean(&dest);
5972
5973 // Pop the result of evaluating the first part.
5974 frame_->Drop();
5975
5976 // Compile right side expression.
5977 is_true.Bind();
5978 Load(node->right());
5979
5980 // Exit (always with a materialized value).
5981 exit.Bind();
5982 }
5983
5984 } else if (op == Token::OR) {
5985 JumpTarget is_false;
5986 ControlDestination dest(destination()->true_target(), &is_false, false);
Steve Blockd0582a62009-12-15 09:54:21 +00005987 LoadCondition(node->left(), &dest, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00005988
5989 if (dest.true_was_fall_through()) {
5990 // The current true target was used as the fall-through. If
5991 // there are no dangling jumps to is_false then the left
5992 // subexpression was unconditionally true. Otherwise we have
5993 // paths where we do have to evaluate the right subexpression.
5994 if (is_false.is_linked()) {
5995 // We need to compile the right subexpression. If the jump to
5996 // the current true target was a forward jump then we have a
5997 // valid frame, we have just bound the true target, and we
5998 // have to jump around the code for the right subexpression.
5999 if (has_valid_frame()) {
6000 destination()->true_target()->Unuse();
6001 destination()->true_target()->Jump();
6002 }
6003 is_false.Bind();
6004 // The left subexpression compiled to control flow, so the
6005 // right one is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00006006 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006007 } else {
6008 // We have just jumped to or bound the current true target but
6009 // the current control destination is not marked as used.
6010 destination()->Use(true);
6011 }
6012
6013 } else if (dest.is_used()) {
6014 // The left subexpression compiled to control flow (and is_false
6015 // was just bound), so the right is free to do so as well.
Steve Blockd0582a62009-12-15 09:54:21 +00006016 LoadCondition(node->right(), destination(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006017
6018 } else {
6019 // We have a materialized value on the frame, so we exit with
6020 // one on all paths. There are possibly also jumps to is_false
6021 // from nested subexpressions.
6022 JumpTarget pop_and_continue;
6023 JumpTarget exit;
6024
6025 // Avoid popping the result if it converts to 'true' using the
6026 // standard ToBoolean() conversion as described in ECMA-262,
6027 // section 9.2, page 30.
6028 //
6029 // Duplicate the TOS value. The duplicate will be popped by
6030 // ToBoolean.
6031 frame_->Dup();
6032 ControlDestination dest(&exit, &pop_and_continue, false);
6033 ToBoolean(&dest);
6034
6035 // Pop the result of evaluating the first part.
6036 frame_->Drop();
6037
6038 // Compile right side expression.
6039 is_false.Bind();
6040 Load(node->right());
6041
6042 // Exit (always with a materialized value).
6043 exit.Bind();
6044 }
6045
6046 } else {
6047 // NOTE: The code below assumes that the slow cases (calls to runtime)
6048 // never return a constant/immutable object.
6049 OverwriteMode overwrite_mode = NO_OVERWRITE;
6050 if (node->left()->AsBinaryOperation() != NULL &&
6051 node->left()->AsBinaryOperation()->ResultOverwriteAllowed()) {
6052 overwrite_mode = OVERWRITE_LEFT;
6053 } else if (node->right()->AsBinaryOperation() != NULL &&
6054 node->right()->AsBinaryOperation()->ResultOverwriteAllowed()) {
6055 overwrite_mode = OVERWRITE_RIGHT;
6056 }
6057
6058 Load(node->left());
6059 Load(node->right());
6060 GenericBinaryOperation(node->op(), node->type(), overwrite_mode);
6061 }
6062}
6063
6064
6065void CodeGenerator::VisitThisFunction(ThisFunction* node) {
6066 frame_->PushFunction();
6067}
6068
6069
6070void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
6071 Comment cmnt(masm_, "[ CompareOperation");
6072
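  // Some of the fast paths below load the left operand early; this flag
  // records that so the operand is not loaded a second time.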
Leon Clarkee46be812010-01-19 14:06:41 +00006073 bool left_already_loaded = false;
6074
Steve Blocka7e24c12009-10-30 11:49:00 +00006075 // Get the expressions from the node.
6076 Expression* left = node->left();
6077 Expression* right = node->right();
6078 Token::Value op = node->op();
6079 // To make typeof testing for natives implemented in JavaScript really
6080 // efficient, we generate special code for expressions of the form:
6081 // 'typeof <expression> == <string>'.
6082 UnaryOperation* operation = left->AsUnaryOperation();
6083 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
6084 (operation != NULL && operation->op() == Token::TYPEOF) &&
6085 (right->AsLiteral() != NULL &&
6086 right->AsLiteral()->handle()->IsString())) {
6087 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
6088
6089 // Load the operand and move it to a register.
6090 LoadTypeofExpression(operation->expression());
6091 Result answer = frame_->Pop();
6092 answer.ToRegister();
6093
6094 if (check->Equals(Heap::number_symbol())) {
6095 __ test(answer.reg(), Immediate(kSmiTagMask));
6096 destination()->true_target()->Branch(zero);
6097 frame_->Spill(answer.reg());
6098 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
6099 __ cmp(answer.reg(), Factory::heap_number_map());
6100 answer.Unuse();
6101 destination()->Split(equal);
6102
6103 } else if (check->Equals(Heap::string_symbol())) {
6104 __ test(answer.reg(), Immediate(kSmiTagMask));
6105 destination()->false_target()->Branch(zero);
6106
6107 // It can be an undetectable string object.
6108 Result temp = allocator()->Allocate();
6109 ASSERT(temp.is_valid());
6110 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
6111 __ movzx_b(temp.reg(), FieldOperand(temp.reg(), Map::kBitFieldOffset));
6112 __ test(temp.reg(), Immediate(1 << Map::kIsUndetectable));
6113 destination()->false_target()->Branch(not_zero);
6114 __ mov(temp.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
6115 __ movzx_b(temp.reg(),
6116 FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
6117 __ cmp(temp.reg(), FIRST_NONSTRING_TYPE);
6118 temp.Unuse();
6119 answer.Unuse();
6120 destination()->Split(less);
6121
6122 } else if (check->Equals(Heap::boolean_symbol())) {
6123 __ cmp(answer.reg(), Factory::true_value());
6124 destination()->true_target()->Branch(equal);
6125 __ cmp(answer.reg(), Factory::false_value());
6126 answer.Unuse();
6127 destination()->Split(equal);
6128
6129 } else if (check->Equals(Heap::undefined_symbol())) {
6130 __ cmp(answer.reg(), Factory::undefined_value());
6131 destination()->true_target()->Branch(equal);
6132
6133 __ test(answer.reg(), Immediate(kSmiTagMask));
6134 destination()->false_target()->Branch(zero);
6135
6136 // It can be an undetectable object.
6137 frame_->Spill(answer.reg());
6138 __ mov(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
6139 __ movzx_b(answer.reg(),
6140 FieldOperand(answer.reg(), Map::kBitFieldOffset));
6141 __ test(answer.reg(), Immediate(1 << Map::kIsUndetectable));
6142 answer.Unuse();
6143 destination()->Split(not_zero);
6144
6145 } else if (check->Equals(Heap::function_symbol())) {
6146 __ test(answer.reg(), Immediate(kSmiTagMask));
6147 destination()->false_target()->Branch(zero);
6148 frame_->Spill(answer.reg());
6149 __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
Steve Blockd0582a62009-12-15 09:54:21 +00006150 destination()->true_target()->Branch(equal);
6151 // Regular expressions are callable so typeof == 'function'.
6152 __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006153 answer.Unuse();
6154 destination()->Split(equal);
Steve Blocka7e24c12009-10-30 11:49:00 +00006155 } else if (check->Equals(Heap::object_symbol())) {
6156 __ test(answer.reg(), Immediate(kSmiTagMask));
6157 destination()->false_target()->Branch(zero);
6158 __ cmp(answer.reg(), Factory::null_value());
6159 destination()->true_target()->Branch(equal);
6160
Steve Blocka7e24c12009-10-30 11:49:00 +00006161 Result map = allocator()->Allocate();
6162 ASSERT(map.is_valid());
Steve Blockd0582a62009-12-15 09:54:21 +00006163 // Regular expressions are typeof == 'function', not 'object'.
6164 __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, map.reg());
6165 destination()->false_target()->Branch(equal);
6166
6167 // It can be an undetectable object.
Steve Blocka7e24c12009-10-30 11:49:00 +00006168 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kBitFieldOffset));
6169 __ test(map.reg(), Immediate(1 << Map::kIsUndetectable));
6170 destination()->false_target()->Branch(not_zero);
6171 __ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
6172 __ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
6173 __ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
6174 destination()->false_target()->Branch(less);
6175 __ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
6176 answer.Unuse();
6177 map.Unuse();
6178 destination()->Split(less_equal);
6179 } else {
6180 // Uncommon case: typeof testing against a string literal that is
6181 // never returned from the typeof operator.
6182 answer.Unuse();
6183 destination()->Goto(false);
6184 }
6185 return;
Leon Clarkee46be812010-01-19 14:06:41 +00006186 } else if (op == Token::LT &&
6187 right->AsLiteral() != NULL &&
6188 right->AsLiteral()->handle()->IsHeapNumber()) {
6189 Handle<HeapNumber> check(HeapNumber::cast(*right->AsLiteral()->handle()));
6190 if (check->value() == 2147483648.0) { // 0x80000000.
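      // A comparison of the form 'x < 2147483648' can be decided without
      // loading the full double value: every smi is below 2^31, and for a
      // heap number it is enough to inspect the exponent word.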
6191 Load(left);
6192 left_already_loaded = true;
6193 Result lhs = frame_->Pop();
6194 lhs.ToRegister();
6195 __ test(lhs.reg(), Immediate(kSmiTagMask));
6196 destination()->true_target()->Branch(zero); // All Smis are less.
6197 Result scratch = allocator()->Allocate();
6198 ASSERT(scratch.is_valid());
6199 __ mov(scratch.reg(), FieldOperand(lhs.reg(), HeapObject::kMapOffset));
6200 __ cmp(scratch.reg(), Factory::heap_number_map());
6201 JumpTarget not_a_number;
6202 not_a_number.Branch(not_equal, &lhs);
6203 __ mov(scratch.reg(),
6204 FieldOperand(lhs.reg(), HeapNumber::kExponentOffset));
6205 __ cmp(Operand(scratch.reg()), Immediate(0xfff00000));
6206 not_a_number.Branch(above_equal, &lhs); // It's a negative NaN or -Inf.
6207 const uint32_t borderline_exponent =
6208 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
6209 __ cmp(Operand(scratch.reg()), Immediate(borderline_exponent));
6210 scratch.Unuse();
6211 lhs.Unuse();
6212 destination()->true_target()->Branch(less);
6213 destination()->false_target()->Jump();
6214
6215 not_a_number.Bind(&lhs);
6216 frame_->Push(&lhs);
6217 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006218 }
6219
6220 Condition cc = no_condition;
6221 bool strict = false;
6222 switch (op) {
6223 case Token::EQ_STRICT:
6224 strict = true;
6225 // Fall through
6226 case Token::EQ:
6227 cc = equal;
6228 break;
6229 case Token::LT:
6230 cc = less;
6231 break;
6232 case Token::GT:
6233 cc = greater;
6234 break;
6235 case Token::LTE:
6236 cc = less_equal;
6237 break;
6238 case Token::GTE:
6239 cc = greater_equal;
6240 break;
6241 case Token::IN: {
Leon Clarkee46be812010-01-19 14:06:41 +00006242 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00006243 Load(right);
6244 Result answer = frame_->InvokeBuiltin(Builtins::IN, CALL_FUNCTION, 2);
6245 frame_->Push(&answer); // push the result
6246 return;
6247 }
6248 case Token::INSTANCEOF: {
Leon Clarkee46be812010-01-19 14:06:41 +00006249 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00006250 Load(right);
6251 InstanceofStub stub;
6252 Result answer = frame_->CallStub(&stub, 2);
6253 answer.ToRegister();
6254 __ test(answer.reg(), Operand(answer.reg()));
6255 answer.Unuse();
6256 destination()->Split(zero);
6257 return;
6258 }
6259 default:
6260 UNREACHABLE();
6261 }
Leon Clarkee46be812010-01-19 14:06:41 +00006262 if (!left_already_loaded) Load(left);
Steve Blocka7e24c12009-10-30 11:49:00 +00006263 Load(right);
Leon Clarkee46be812010-01-19 14:06:41 +00006264 Comparison(node, cc, strict, destination());
Steve Blocka7e24c12009-10-30 11:49:00 +00006265}
6266
6267
6268#ifdef DEBUG
6269bool CodeGenerator::HasValidEntryRegisters() {
6270 return (allocator()->count(eax) == (frame()->is_used(eax) ? 1 : 0))
6271 && (allocator()->count(ebx) == (frame()->is_used(ebx) ? 1 : 0))
6272 && (allocator()->count(ecx) == (frame()->is_used(ecx) ? 1 : 0))
6273 && (allocator()->count(edx) == (frame()->is_used(edx) ? 1 : 0))
6274 && (allocator()->count(edi) == (frame()->is_used(edi) ? 1 : 0));
6275}
6276#endif
6277
6278
6279// Emit a LoadIC call to get the value from receiver and leave it in
6280// dst. The receiver register is restored after the call.
6281class DeferredReferenceGetNamedValue: public DeferredCode {
6282 public:
6283 DeferredReferenceGetNamedValue(Register dst,
6284 Register receiver,
6285 Handle<String> name)
6286 : dst_(dst), receiver_(receiver), name_(name) {
6287 set_comment("[ DeferredReferenceGetNamedValue");
6288 }
6289
6290 virtual void Generate();
6291
6292 Label* patch_site() { return &patch_site_; }
6293
6294 private:
6295 Label patch_site_;
6296 Register dst_;
6297 Register receiver_;
6298 Handle<String> name_;
6299};
6300
6301
6302void DeferredReferenceGetNamedValue::Generate() {
6303 __ push(receiver_);
6304 __ Set(ecx, Immediate(name_));
6305 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
6306 __ call(ic, RelocInfo::CODE_TARGET);
6307 // The call must be followed by a test eax instruction to indicate
6308 // that the inobject property case was inlined.
6309 //
6310 // Store the delta to the map check instruction here in the test
6311 // instruction. Use masm_-> instead of the __ macro since the
6312 // latter can't return a value.
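  // The IC initialization code uses this delta to locate and patch the
  // inlined map check; the keyed load case below documents the same
  // protocol.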
6313 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
6314 // Here we use masm_-> instead of the __ macro because this is the
6315 // instruction that gets patched and coverage code gets in the way.
6316 masm_->test(eax, Immediate(-delta_to_patch_site));
6317 __ IncrementCounter(&Counters::named_load_inline_miss, 1);
6318
6319 if (!dst_.is(eax)) __ mov(dst_, eax);
6320 __ pop(receiver_);
6321}
6322
6323
6324class DeferredReferenceGetKeyedValue: public DeferredCode {
6325 public:
6326 explicit DeferredReferenceGetKeyedValue(Register dst,
6327 Register receiver,
6328 Register key,
6329 bool is_global)
6330 : dst_(dst), receiver_(receiver), key_(key), is_global_(is_global) {
6331 set_comment("[ DeferredReferenceGetKeyedValue");
6332 }
6333
6334 virtual void Generate();
6335
6336 Label* patch_site() { return &patch_site_; }
6337
6338 private:
6339 Label patch_site_;
6340 Register dst_;
6341 Register receiver_;
6342 Register key_;
6343 bool is_global_;
6344};
6345
6346
6347void DeferredReferenceGetKeyedValue::Generate() {
6348 __ push(receiver_); // First IC argument.
6349 __ push(key_); // Second IC argument.
6350
6351 // Calculate the delta from the IC call instruction to the map check
6352 // cmp instruction in the inlined version. This delta is stored in
6353 // a test(eax, delta) instruction after the call so that we can find
6354 // it in the IC initialization code and patch the cmp instruction.
6355 // This means that we cannot allow test instructions after calls to
6356 // KeyedLoadIC stubs in other places.
6357 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
6358 RelocInfo::Mode mode = is_global_
6359 ? RelocInfo::CODE_TARGET_CONTEXT
6360 : RelocInfo::CODE_TARGET;
6361 __ call(ic, mode);
6362 // The delta from the start of the map-compare instruction to the
6363 // test instruction. We use masm_-> directly here instead of the __
6364 // macro because the macro sometimes uses macro expansion to turn
6365 // into something that can't return a value. This is encountered
6366 // when doing generated code coverage tests.
6367 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
6368 // Here we use masm_-> instead of the __ macro because this is the
6369 // instruction that gets patched and coverage code gets in the way.
6370 masm_->test(eax, Immediate(-delta_to_patch_site));
6371 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
6372
6373 if (!dst_.is(eax)) __ mov(dst_, eax);
6374 __ pop(key_);
6375 __ pop(receiver_);
6376}
6377
6378
6379class DeferredReferenceSetKeyedValue: public DeferredCode {
6380 public:
6381 DeferredReferenceSetKeyedValue(Register value,
6382 Register key,
6383 Register receiver)
6384 : value_(value), key_(key), receiver_(receiver) {
6385 set_comment("[ DeferredReferenceSetKeyedValue");
6386 }
6387
6388 virtual void Generate();
6389
6390 Label* patch_site() { return &patch_site_; }
6391
6392 private:
6393 Register value_;
6394 Register key_;
6395 Register receiver_;
6396 Label patch_site_;
6397};
6398
6399
6400void DeferredReferenceSetKeyedValue::Generate() {
6401 __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
6402 // Push receiver and key arguments on the stack.
6403 __ push(receiver_);
6404 __ push(key_);
6405 // Move value argument to eax as expected by the IC stub.
6406 if (!value_.is(eax)) __ mov(eax, value_);
6407 // Call the IC stub.
6408 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
6409 __ call(ic, RelocInfo::CODE_TARGET);
6410 // The delta from the start of the map-compare instruction to the
6411 // test instruction. We use masm_-> directly here instead of the
6412 // __ macro because the macro sometimes uses macro expansion to turn
6413 // into something that can't return a value. This is encountered
6414 // when doing generated code coverage tests.
6415 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
6416 // Here we use masm_-> instead of the __ macro because this is the
6417 // instruction that gets patched and coverage code gets in the way.
6418 masm_->test(eax, Immediate(-delta_to_patch_site));
6419 // Restore value (returned from store IC), key and receiver
6420 // registers.
6421 if (!value_.is(eax)) __ mov(value_, eax);
6422 __ pop(key_);
6423 __ pop(receiver_);
6424}
6425
6426
Leon Clarked91b9f72010-01-27 17:25:45 +00006427Result CodeGenerator::EmitKeyedLoad(bool is_global) {
6428 Comment cmnt(masm_, "[ Load from keyed Property");
 6429  // Inline array load code if inside a loop.  We do not know
6430 // the receiver map yet, so we initially generate the code with
6431 // a check against an invalid map. In the inline cache code, we
6432 // patch the map check if appropriate.
6433 if (loop_nesting() > 0) {
6434 Comment cmnt(masm_, "[ Inlined load from keyed Property");
6435
6436 Result key = frame_->Pop();
6437 Result receiver = frame_->Pop();
6438 key.ToRegister();
6439 receiver.ToRegister();
6440
6441 // Use a fresh temporary to load the elements without destroying
6442 // the receiver which is needed for the deferred slow case.
6443 Result elements = allocator()->Allocate();
6444 ASSERT(elements.is_valid());
6445
6446 // Use a fresh temporary for the index and later the loaded
6447 // value.
6448 Result index = allocator()->Allocate();
6449 ASSERT(index.is_valid());
6450
6451 DeferredReferenceGetKeyedValue* deferred =
6452 new DeferredReferenceGetKeyedValue(index.reg(),
6453 receiver.reg(),
6454 key.reg(),
6455 is_global);
6456
6457 // Check that the receiver is not a smi (only needed if this
6458 // is not a load from the global context) and that it has the
6459 // expected map.
6460 if (!is_global) {
6461 __ test(receiver.reg(), Immediate(kSmiTagMask));
6462 deferred->Branch(zero);
6463 }
6464
6465 // Initially, use an invalid map. The map is patched in the IC
6466 // initialization code.
6467 __ bind(deferred->patch_site());
6468 // Use masm-> here instead of the double underscore macro since extra
6469 // coverage code can interfere with the patching.
6470 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6471 Immediate(Factory::null_value()));
6472 deferred->Branch(not_equal);
6473
6474 // Check that the key is a smi.
6475 __ test(key.reg(), Immediate(kSmiTagMask));
6476 deferred->Branch(not_zero);
6477
6478 // Get the elements array from the receiver and check that it
6479 // is not a dictionary.
6480 __ mov(elements.reg(),
6481 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6482 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
6483 Immediate(Factory::fixed_array_map()));
6484 deferred->Branch(not_equal);
6485
6486 // Shift the key to get the actual index value and check that
6487 // it is within bounds.
6488 __ mov(index.reg(), key.reg());
6489 __ SmiUntag(index.reg());
6490 __ cmp(index.reg(),
6491 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
6492 deferred->Branch(above_equal);
6493
6494 // Load and check that the result is not the hole. We could
6495 // reuse the index or elements register for the value.
6496 //
6497 // TODO(206): Consider whether it makes sense to try some
6498 // heuristic about which register to reuse. For example, if
 6499    // one is eax, then we can reuse that one because the value
6500 // coming from the deferred code will be in eax.
6501 Result value = index;
6502 __ mov(value.reg(), Operand(elements.reg(),
6503 index.reg(),
6504 times_4,
6505 FixedArray::kHeaderSize - kHeapObjectTag));
6506 elements.Unuse();
6507 index.Unuse();
6508 __ cmp(Operand(value.reg()), Immediate(Factory::the_hole_value()));
6509 deferred->Branch(equal);
6510 __ IncrementCounter(&Counters::keyed_load_inline, 1);
6511
6512 deferred->BindExit();
6513 // Restore the receiver and key to the frame and push the
6514 // result on top of it.
6515 frame_->Push(&receiver);
6516 frame_->Push(&key);
6517 return value;
6518 } else {
6519 Comment cmnt(masm_, "[ Load from keyed Property");
6520 RelocInfo::Mode mode = is_global
6521 ? RelocInfo::CODE_TARGET_CONTEXT
6522 : RelocInfo::CODE_TARGET;
6523 Result answer = frame_->CallKeyedLoadIC(mode);
6524 // Make sure that we do not have a test instruction after the
6525 // call. A test instruction after the call is used to
6526 // indicate that we have generated an inline version of the
6527 // keyed load. The explicit nop instruction is here because
6528 // the push that follows might be peep-hole optimized away.
6529 __ nop();
6530 return answer;
6531 }
6532}
6533
6534
Steve Blocka7e24c12009-10-30 11:49:00 +00006535#undef __
6536#define __ ACCESS_MASM(masm)
6537
6538
6539Handle<String> Reference::GetName() {
6540 ASSERT(type_ == NAMED);
6541 Property* property = expression_->AsProperty();
6542 if (property == NULL) {
6543 // Global variable reference treated as a named property reference.
6544 VariableProxy* proxy = expression_->AsVariableProxy();
6545 ASSERT(proxy->AsVariable() != NULL);
6546 ASSERT(proxy->AsVariable()->is_global());
6547 return proxy->name();
6548 } else {
6549 Literal* raw_name = property->key()->AsLiteral();
6550 ASSERT(raw_name != NULL);
6551 return Handle<String>(String::cast(*raw_name->handle()));
6552 }
6553}
6554
6555
Steve Blockd0582a62009-12-15 09:54:21 +00006556void Reference::GetValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00006557 ASSERT(!cgen_->in_spilled_code());
6558 ASSERT(cgen_->HasValidEntryRegisters());
6559 ASSERT(!is_illegal());
6560 MacroAssembler* masm = cgen_->masm();
6561
6562 // Record the source position for the property load.
6563 Property* property = expression_->AsProperty();
6564 if (property != NULL) {
6565 cgen_->CodeForSourcePosition(property->position());
6566 }
6567
6568 switch (type_) {
6569 case SLOT: {
6570 Comment cmnt(masm, "[ Load from Slot");
6571 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
6572 ASSERT(slot != NULL);
Steve Blockd0582a62009-12-15 09:54:21 +00006573 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00006574 break;
6575 }
6576
6577 case NAMED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00006578 Variable* var = expression_->AsVariableProxy()->AsVariable();
6579 bool is_global = var != NULL;
6580 ASSERT(!is_global || var->is_global());
6581
6582 // Do not inline the inobject property case for loads from the global
6583 // object. Also do not inline for unoptimized code. This saves time
6584 // in the code generator. Unoptimized code is toplevel code or code
6585 // that is not in a loop.
6586 if (is_global ||
6587 cgen_->scope()->is_global_scope() ||
6588 cgen_->loop_nesting() == 0) {
6589 Comment cmnt(masm, "[ Load from named Property");
6590 cgen_->frame()->Push(GetName());
6591
6592 RelocInfo::Mode mode = is_global
6593 ? RelocInfo::CODE_TARGET_CONTEXT
6594 : RelocInfo::CODE_TARGET;
6595 Result answer = cgen_->frame()->CallLoadIC(mode);
6596 // A test eax instruction following the call signals that the
6597 // inobject property case was inlined. Ensure that there is not
6598 // a test eax instruction here.
6599 __ nop();
6600 cgen_->frame()->Push(&answer);
6601 } else {
6602 // Inline the inobject property case.
6603 Comment cmnt(masm, "[ Inlined named property load");
6604 Result receiver = cgen_->frame()->Pop();
6605 receiver.ToRegister();
6606
6607 Result value = cgen_->allocator()->Allocate();
6608 ASSERT(value.is_valid());
6609 DeferredReferenceGetNamedValue* deferred =
6610 new DeferredReferenceGetNamedValue(value.reg(),
6611 receiver.reg(),
6612 GetName());
6613
6614 // Check that the receiver is a heap object.
6615 __ test(receiver.reg(), Immediate(kSmiTagMask));
6616 deferred->Branch(zero);
6617
6618 __ bind(deferred->patch_site());
6619 // This is the map check instruction that will be patched (so we can't
6620 // use the double underscore macro that may insert instructions).
6621 // Initially use an invalid map to force a failure.
6622 masm->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6623 Immediate(Factory::null_value()));
6624 // This branch is always a forwards branch so it's always a fixed
6625 // size which allows the assert below to succeed and patching to work.
6626 deferred->Branch(not_equal);
6627
6628 // The delta from the patch label to the load offset must be
6629 // statically known.
6630 ASSERT(masm->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
6631 LoadIC::kOffsetToLoadInstruction);
6632 // The initial (invalid) offset has to be large enough to force
6633 // a 32-bit instruction encoding to allow patching with an
6634 // arbitrary offset. Use kMaxInt (minus kHeapObjectTag).
6635 int offset = kMaxInt;
6636 masm->mov(value.reg(), FieldOperand(receiver.reg(), offset));
6637
6638 __ IncrementCounter(&Counters::named_load_inline, 1);
6639 deferred->BindExit();
6640 cgen_->frame()->Push(&receiver);
6641 cgen_->frame()->Push(&value);
6642 }
6643 break;
6644 }
6645
6646 case KEYED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00006647 Variable* var = expression_->AsVariableProxy()->AsVariable();
6648 bool is_global = var != NULL;
6649 ASSERT(!is_global || var->is_global());
Leon Clarked91b9f72010-01-27 17:25:45 +00006650 Result value = cgen_->EmitKeyedLoad(is_global);
6651 cgen_->frame()->Push(&value);
Steve Blocka7e24c12009-10-30 11:49:00 +00006652 break;
6653 }
6654
6655 default:
6656 UNREACHABLE();
6657 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006658
6659 if (!persist_after_get_) {
6660 cgen_->UnloadReference(this);
6661 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006662}
6663
6664
Steve Blockd0582a62009-12-15 09:54:21 +00006665void Reference::TakeValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00006666 // For non-constant frame-allocated slots, we invalidate the value in the
6667 // slot. For all others, we fall back on GetValue.
6668 ASSERT(!cgen_->in_spilled_code());
6669 ASSERT(!is_illegal());
6670 if (type_ != SLOT) {
Steve Blockd0582a62009-12-15 09:54:21 +00006671 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006672 return;
6673 }
6674
6675 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
6676 ASSERT(slot != NULL);
6677 if (slot->type() == Slot::LOOKUP ||
6678 slot->type() == Slot::CONTEXT ||
6679 slot->var()->mode() == Variable::CONST ||
6680 slot->is_arguments()) {
Steve Blockd0582a62009-12-15 09:54:21 +00006681 GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006682 return;
6683 }
6684
6685 // Only non-constant, frame-allocated parameters and locals can
6686 // reach here. Be careful not to use the optimizations for arguments
6687 // object access since it may not have been initialized yet.
6688 ASSERT(!slot->is_arguments());
6689 if (slot->type() == Slot::PARAMETER) {
6690 cgen_->frame()->TakeParameterAt(slot->index());
6691 } else {
6692 ASSERT(slot->type() == Slot::LOCAL);
6693 cgen_->frame()->TakeLocalAt(slot->index());
6694 }
Leon Clarked91b9f72010-01-27 17:25:45 +00006695
6696 ASSERT(persist_after_get_);
6697 // Do not unload the reference, because it is used in SetValue.
Steve Blocka7e24c12009-10-30 11:49:00 +00006698}
6699
6700
6701void Reference::SetValue(InitState init_state) {
6702 ASSERT(cgen_->HasValidEntryRegisters());
6703 ASSERT(!is_illegal());
6704 MacroAssembler* masm = cgen_->masm();
6705 switch (type_) {
6706 case SLOT: {
6707 Comment cmnt(masm, "[ Store to Slot");
6708 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
6709 ASSERT(slot != NULL);
6710 cgen_->StoreToSlot(slot, init_state);
Leon Clarke4515c472010-02-03 11:58:03 +00006711 cgen_->UnloadReference(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006712 break;
6713 }
6714
6715 case NAMED: {
6716 Comment cmnt(masm, "[ Store to named Property");
6717 cgen_->frame()->Push(GetName());
6718 Result answer = cgen_->frame()->CallStoreIC();
6719 cgen_->frame()->Push(&answer);
Leon Clarke4515c472010-02-03 11:58:03 +00006720 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00006721 break;
6722 }
6723
6724 case KEYED: {
6725 Comment cmnt(masm, "[ Store to keyed Property");
6726
6727 // Generate inlined version of the keyed store if the code is in
6728 // a loop and the key is likely to be a smi.
6729 Property* property = expression()->AsProperty();
6730 ASSERT(property != NULL);
Leon Clarkee46be812010-01-19 14:06:41 +00006731 StaticType* key_smi_analysis = property->key()->type();
Steve Blocka7e24c12009-10-30 11:49:00 +00006732
6733 if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
6734 Comment cmnt(masm, "[ Inlined store to keyed Property");
6735
6736 // Get the receiver, key and value into registers.
6737 Result value = cgen_->frame()->Pop();
6738 Result key = cgen_->frame()->Pop();
6739 Result receiver = cgen_->frame()->Pop();
6740
6741 Result tmp = cgen_->allocator_->Allocate();
6742 ASSERT(tmp.is_valid());
6743
6744 // Determine whether the value is a constant before putting it
6745 // in a register.
6746 bool value_is_constant = value.is_constant();
6747
6748 // Make sure that value, key and receiver are in registers.
6749 value.ToRegister();
6750 key.ToRegister();
6751 receiver.ToRegister();
6752
6753 DeferredReferenceSetKeyedValue* deferred =
6754 new DeferredReferenceSetKeyedValue(value.reg(),
6755 key.reg(),
6756 receiver.reg());
6757
6758 // Check that the value is a smi if it is not a constant. We
6759 // can skip the write barrier for smis and constants.
6760 if (!value_is_constant) {
6761 __ test(value.reg(), Immediate(kSmiTagMask));
6762 deferred->Branch(not_zero);
6763 }
6764
6765 // Check that the key is a non-negative smi.
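        // A single test covers both conditions: bit 0 must be clear (the
        // value is a smi) and bit 31 must be clear (the smi is not
        // negative).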
6766 __ test(key.reg(), Immediate(kSmiTagMask | 0x80000000));
6767 deferred->Branch(not_zero);
6768
6769 // Check that the receiver is not a smi.
6770 __ test(receiver.reg(), Immediate(kSmiTagMask));
6771 deferred->Branch(zero);
6772
6773 // Check that the receiver is a JSArray.
6774 __ mov(tmp.reg(),
6775 FieldOperand(receiver.reg(), HeapObject::kMapOffset));
6776 __ movzx_b(tmp.reg(),
6777 FieldOperand(tmp.reg(), Map::kInstanceTypeOffset));
6778 __ cmp(tmp.reg(), JS_ARRAY_TYPE);
6779 deferred->Branch(not_equal);
6780
6781 // Check that the key is within bounds. Both the key and the
6782 // length of the JSArray are smis.
6783 __ cmp(key.reg(),
6784 FieldOperand(receiver.reg(), JSArray::kLengthOffset));
6785 deferred->Branch(greater_equal);
6786
6787 // Get the elements array from the receiver and check that it
6788 // is not a dictionary.
6789 __ mov(tmp.reg(),
6790 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6791 // Bind the deferred code patch site to be able to locate the
6792 // fixed array map comparison. When debugging, we patch this
6793 // comparison to always fail so that we will hit the IC call
6794 // in the deferred code which will allow the debugger to
6795 // break for fast case stores.
6796 __ bind(deferred->patch_site());
6797 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
6798 Immediate(Factory::fixed_array_map()));
6799 deferred->Branch(not_equal);
6800
6801 // Store the value.
6802 __ mov(Operand(tmp.reg(),
6803 key.reg(),
6804 times_2,
6805 FixedArray::kHeaderSize - kHeapObjectTag),
6806 value.reg());
6807 __ IncrementCounter(&Counters::keyed_store_inline, 1);
6808
6809 deferred->BindExit();
6810
6811 cgen_->frame()->Push(&receiver);
6812 cgen_->frame()->Push(&key);
6813 cgen_->frame()->Push(&value);
6814 } else {
6815 Result answer = cgen_->frame()->CallKeyedStoreIC();
6816 // Make sure that we do not have a test instruction after the
6817 // call. A test instruction after the call is used to
6818 // indicate that we have generated an inline version of the
6819 // keyed store.
6820 __ nop();
6821 cgen_->frame()->Push(&answer);
6822 }
Leon Clarke4515c472010-02-03 11:58:03 +00006823 cgen_->UnloadReference(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006824 break;
6825 }
6826
6827 default:
6828 UNREACHABLE();
6829 }
6830}
6831
6832
Leon Clarkee46be812010-01-19 14:06:41 +00006833void FastNewClosureStub::Generate(MacroAssembler* masm) {
6834 // Clone the boilerplate in new space. Set the context to the
6835 // current context in esi.
6836 Label gc;
6837 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
6838
6839 // Get the boilerplate function from the stack.
6840 __ mov(edx, Operand(esp, 1 * kPointerSize));
6841
6842 // Compute the function map in the current global context and set that
6843 // as the map of the allocated object.
6844 __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
6845 __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
6846 __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
6847 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
6848
6849 // Clone the rest of the boilerplate fields. We don't have to update
6850 // the write barrier because the allocated object is in new space.
6851 for (int offset = kPointerSize;
6852 offset < JSFunction::kSize;
6853 offset += kPointerSize) {
6854 if (offset == JSFunction::kContextOffset) {
6855 __ mov(FieldOperand(eax, offset), esi);
6856 } else {
6857 __ mov(ebx, FieldOperand(edx, offset));
6858 __ mov(FieldOperand(eax, offset), ebx);
6859 }
6860 }
6861
6862 // Return and remove the on-stack parameter.
6863 __ ret(1 * kPointerSize);
6864
6865 // Create a new closure through the slower runtime call.
6866 __ bind(&gc);
6867 __ pop(ecx); // Temporarily remove return address.
6868 __ pop(edx);
6869 __ push(esi);
6870 __ push(edx);
6871 __ push(ecx); // Restore return address.
6872 __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1);
6873}
6874
6875
6876void FastNewContextStub::Generate(MacroAssembler* masm) {
6877 // Try to allocate the context in new space.
6878 Label gc;
6879 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
6880 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
6881 eax, ebx, ecx, &gc, TAG_OBJECT);
6882
6883 // Get the function from the stack.
6884 __ mov(ecx, Operand(esp, 1 * kPointerSize));
6885
6886 // Setup the object header.
6887 __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
6888 __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length));
6889
6890 // Setup the fixed slots.
6891 __ xor_(ebx, Operand(ebx)); // Set to NULL.
6892 __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
6893 __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
6894 __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
6895 __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
6896
6897 // Copy the global object from the surrounding context. We go through the
6898 // context in the function (ecx) to match the allocation behavior we have
6899 // in the runtime system (see Heap::AllocateFunctionContext).
6900 __ mov(ebx, FieldOperand(ecx, JSFunction::kContextOffset));
6901 __ mov(ebx, Operand(ebx, Context::SlotOffset(Context::GLOBAL_INDEX)));
6902 __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
6903
6904 // Initialize the rest of the slots to undefined.
6905 __ mov(ebx, Factory::undefined_value());
6906 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
6907 __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
6908 }
6909
6910 // Return and remove the on-stack parameter.
6911 __ mov(esi, Operand(eax));
6912 __ ret(1 * kPointerSize);
6913
6914 // Need to collect. Call into runtime system.
6915 __ bind(&gc);
6916 __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
6917}
6918
6919
6920void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
6921 int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
6922 int size = JSArray::kSize + elements_size;
6923
6924 // Load boilerplate object into ecx and check if we need to create a
6925 // boilerplate.
6926 Label slow_case;
6927 __ mov(ecx, Operand(esp, 3 * kPointerSize));
6928 __ mov(eax, Operand(esp, 2 * kPointerSize));
6929 ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
6930 __ mov(ecx, FieldOperand(ecx, eax, times_2, FixedArray::kHeaderSize));
6931 __ cmp(ecx, Factory::undefined_value());
6932 __ j(equal, &slow_case);
6933
6934 // Allocate both the JS array and the elements array in one big
6935 // allocation. This avoids multiple limit checks.
6936 __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
6937
6938 // Copy the JS array part.
6939 for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
6940 if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
6941 __ mov(ebx, FieldOperand(ecx, i));
6942 __ mov(FieldOperand(eax, i), ebx);
6943 }
6944 }
6945
6946 if (length_ > 0) {
6947 // Get hold of the elements array of the boilerplate and setup the
6948 // elements pointer in the resulting object.
6949 __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
6950 __ lea(edx, Operand(eax, JSArray::kSize));
6951 __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
6952
6953 // Copy the elements array.
6954 for (int i = 0; i < elements_size; i += kPointerSize) {
6955 __ mov(ebx, FieldOperand(ecx, i));
6956 __ mov(FieldOperand(edx, i), ebx);
6957 }
6958 }
6959
6960 // Return and remove the on-stack parameters.
6961 __ ret(3 * kPointerSize);
6962
6963 __ bind(&slow_case);
6964 ExternalReference runtime(Runtime::kCreateArrayLiteralShallow);
6965 __ TailCallRuntime(runtime, 3, 1);
6966}
6967
6968
Steve Blocka7e24c12009-10-30 11:49:00 +00006969// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
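// For the cases it does handle it follows ECMA-262 section 9.2: 'null'
// and undetectable objects convert to false, other JavaScript objects to
// true, strings to true iff their length is nonzero, and heap numbers to
// false iff the value is +0, -0 or NaN.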
6970void ToBooleanStub::Generate(MacroAssembler* masm) {
6971 Label false_result, true_result, not_string;
6972 __ mov(eax, Operand(esp, 1 * kPointerSize));
6973
6974 // 'null' => false.
6975 __ cmp(eax, Factory::null_value());
6976 __ j(equal, &false_result);
6977
6978 // Get the map and type of the heap object.
6979 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
6980 __ movzx_b(ecx, FieldOperand(edx, Map::kInstanceTypeOffset));
6981
6982 // Undetectable => false.
6983 __ movzx_b(ebx, FieldOperand(edx, Map::kBitFieldOffset));
6984 __ and_(ebx, 1 << Map::kIsUndetectable);
6985 __ j(not_zero, &false_result);
6986
6987 // JavaScript object => true.
6988 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
6989 __ j(above_equal, &true_result);
6990
6991 // String value => false iff empty.
6992 __ cmp(ecx, FIRST_NONSTRING_TYPE);
6993 __ j(above_equal, &not_string);
Steve Blocka7e24c12009-10-30 11:49:00 +00006994 __ mov(edx, FieldOperand(eax, String::kLengthOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00006995 __ test(edx, Operand(edx));
Steve Blocka7e24c12009-10-30 11:49:00 +00006996 __ j(zero, &false_result);
6997 __ jmp(&true_result);
6998
6999 __ bind(&not_string);
7000 // HeapNumber => false iff +0, -0, or NaN.
7001 __ cmp(edx, Factory::heap_number_map());
7002 __ j(not_equal, &true_result);
7003 __ fldz();
7004 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00007005 __ FCmp();
Steve Blocka7e24c12009-10-30 11:49:00 +00007006 __ j(zero, &false_result);
7007 // Fall through to |true_result|.
7008
7009 // Return 1/0 for true/false in eax.
7010 __ bind(&true_result);
7011 __ mov(eax, 1);
7012 __ ret(1 * kPointerSize);
7013 __ bind(&false_result);
7014 __ mov(eax, 0);
7015 __ ret(1 * kPointerSize);
7016}
7017
7018
Steve Block3ce2e202009-11-05 08:53:23 +00007019void GenericBinaryOpStub::GenerateCall(
7020 MacroAssembler* masm,
7021 Register left,
7022 Register right) {
7023 if (!ArgsInRegistersSupported()) {
7024 // Pass arguments on the stack.
7025 __ push(left);
7026 __ push(right);
7027 } else {
7028 // The calling convention with registers is left in edx and right in eax.
Steve Blockd0582a62009-12-15 09:54:21 +00007029 Register left_arg = edx;
7030 Register right_arg = eax;
7031 if (!(left.is(left_arg) && right.is(right_arg))) {
7032 if (left.is(right_arg) && right.is(left_arg)) {
Steve Block3ce2e202009-11-05 08:53:23 +00007033 if (IsOperationCommutative()) {
7034 SetArgsReversed();
7035 } else {
7036 __ xchg(left, right);
7037 }
Steve Blockd0582a62009-12-15 09:54:21 +00007038 } else if (left.is(left_arg)) {
7039 __ mov(right_arg, right);
7040 } else if (left.is(right_arg)) {
Steve Block3ce2e202009-11-05 08:53:23 +00007041 if (IsOperationCommutative()) {
Steve Blockd0582a62009-12-15 09:54:21 +00007042 __ mov(left_arg, right);
Steve Block3ce2e202009-11-05 08:53:23 +00007043 SetArgsReversed();
7044 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00007045 // Order of moves important to avoid destroying left argument.
7046 __ mov(left_arg, left);
7047 __ mov(right_arg, right);
Steve Block3ce2e202009-11-05 08:53:23 +00007048 }
Steve Blockd0582a62009-12-15 09:54:21 +00007049 } else if (right.is(left_arg)) {
Steve Block3ce2e202009-11-05 08:53:23 +00007050 if (IsOperationCommutative()) {
Steve Blockd0582a62009-12-15 09:54:21 +00007051 __ mov(right_arg, left);
Steve Block3ce2e202009-11-05 08:53:23 +00007052 SetArgsReversed();
7053 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00007054 // Order of moves important to avoid destroying right argument.
7055 __ mov(right_arg, right);
7056 __ mov(left_arg, left);
Steve Block3ce2e202009-11-05 08:53:23 +00007057 }
Steve Blockd0582a62009-12-15 09:54:21 +00007058 } else if (right.is(right_arg)) {
7059 __ mov(left_arg, left);
Steve Block3ce2e202009-11-05 08:53:23 +00007060 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00007061 // Order of moves is not important.
7062 __ mov(left_arg, left);
7063 __ mov(right_arg, right);
Steve Block3ce2e202009-11-05 08:53:23 +00007064 }
7065 }
7066
7067 // Update flags to indicate that arguments are in registers.
7068 SetArgsInRegisters();
Steve Blockd0582a62009-12-15 09:54:21 +00007069 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
Steve Block3ce2e202009-11-05 08:53:23 +00007070 }
7071
7072 // Call the stub.
7073 __ CallStub(this);
7074}
7075
7076
7077void GenericBinaryOpStub::GenerateCall(
7078 MacroAssembler* masm,
7079 Register left,
7080 Smi* right) {
7081 if (!ArgsInRegistersSupported()) {
7082 // Pass arguments on the stack.
7083 __ push(left);
7084 __ push(Immediate(right));
7085 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00007086 // The calling convention with registers is left in edx and right in eax.
7087 Register left_arg = edx;
7088 Register right_arg = eax;
7089 if (left.is(left_arg)) {
7090 __ mov(right_arg, Immediate(right));
7091 } else if (left.is(right_arg) && IsOperationCommutative()) {
7092 __ mov(left_arg, Immediate(right));
Steve Block3ce2e202009-11-05 08:53:23 +00007093 SetArgsReversed();
7094 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00007095 __ mov(left_arg, left);
7096 __ mov(right_arg, Immediate(right));
Steve Block3ce2e202009-11-05 08:53:23 +00007097 }
7098
7099 // Update flags to indicate that arguments are in registers.
7100 SetArgsInRegisters();
Steve Blockd0582a62009-12-15 09:54:21 +00007101 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
Steve Block3ce2e202009-11-05 08:53:23 +00007102 }
7103
7104 // Call the stub.
7105 __ CallStub(this);
7106}
7107
7108
7109void GenericBinaryOpStub::GenerateCall(
7110 MacroAssembler* masm,
7111 Smi* left,
7112 Register right) {
7113 if (!ArgsInRegistersSupported()) {
7114 // Pass arguments on the stack.
7115 __ push(Immediate(left));
7116 __ push(right);
7117 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00007118 // The calling convention with registers is left in edx and right in eax.
7119 Register left_arg = edx;
7120 Register right_arg = eax;
7121 if (right.is(right_arg)) {
7122 __ mov(left_arg, Immediate(left));
7123 } else if (right.is(left_arg) && IsOperationCommutative()) {
7124 __ mov(right_arg, Immediate(left));
7125 SetArgsReversed();
Steve Block3ce2e202009-11-05 08:53:23 +00007126 } else {
Steve Blockd0582a62009-12-15 09:54:21 +00007127 __ mov(left_arg, Immediate(left));
7128 __ mov(right_arg, right);
Steve Block3ce2e202009-11-05 08:53:23 +00007129 }
7130 // Update flags to indicate that arguments are in registers.
7131 SetArgsInRegisters();
Steve Blockd0582a62009-12-15 09:54:21 +00007132 __ IncrementCounter(&Counters::generic_binary_stub_calls_regs, 1);
Steve Block3ce2e202009-11-05 08:53:23 +00007133 }
7134
7135 // Call the stub.
7136 __ CallStub(this);
7137}
7138
7139
Leon Clarked91b9f72010-01-27 17:25:45 +00007140Result GenericBinaryOpStub::GenerateCall(MacroAssembler* masm,
7141 VirtualFrame* frame,
7142 Result* left,
7143 Result* right) {
7144 if (ArgsInRegistersSupported()) {
7145 SetArgsInRegisters();
7146 return frame->CallStub(this, left, right);
7147 } else {
7148 frame->Push(left);
7149 frame->Push(right);
7150 return frame->CallStub(this, 2);
7151 }
7152}
7153
7154
Steve Blocka7e24c12009-10-30 11:49:00 +00007155void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
Leon Clarked91b9f72010-01-27 17:25:45 +00007156 // 1. Move arguments into edx, eax except for DIV and MOD, which need the
7157 // dividend in eax and edx free for the division. Use eax, ebx for those.
7158 Comment load_comment(masm, "-- Load arguments");
7159 Register left = edx;
7160 Register right = eax;
7161 if (op_ == Token::DIV || op_ == Token::MOD) {
7162 left = eax;
7163 right = ebx;
7164 if (HasArgsInRegisters()) {
7165 __ mov(ebx, eax);
7166 __ mov(eax, edx);
7167 }
7168 }
7169 if (!HasArgsInRegisters()) {
7170 __ mov(right, Operand(esp, 1 * kPointerSize));
7171 __ mov(left, Operand(esp, 2 * kPointerSize));
7172 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007173
Leon Clarked91b9f72010-01-27 17:25:45 +00007174 // 2. Prepare the smi check of both operands by oring them together.
7175 Comment smi_check_comment(masm, "-- Smi check arguments");
7176 Label not_smis;
7177 Register combined = ecx;
7178 ASSERT(!left.is(combined) && !right.is(combined));
Steve Blocka7e24c12009-10-30 11:49:00 +00007179 switch (op_) {
Leon Clarked91b9f72010-01-27 17:25:45 +00007180 case Token::BIT_OR:
7181 // Perform the operation into eax and smi check the result. Preserve
7182 // eax in case the result is not a smi.
7183 ASSERT(!left.is(ecx) && !right.is(ecx));
7184 __ mov(ecx, right);
7185 __ or_(right, Operand(left)); // Bitwise or is commutative.
7186 combined = right;
7187 break;
7188
7189 case Token::BIT_XOR:
7190 case Token::BIT_AND:
Leon Clarkeeab96aa2010-01-27 16:31:12 +00007191 case Token::ADD:
Steve Blocka7e24c12009-10-30 11:49:00 +00007192 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00007193 case Token::MUL:
Steve Blocka7e24c12009-10-30 11:49:00 +00007194 case Token::DIV:
7195 case Token::MOD:
Leon Clarked91b9f72010-01-27 17:25:45 +00007196 __ mov(combined, right);
7197 __ or_(combined, Operand(left));
7198 break;
7199
7200 case Token::SHL:
7201 case Token::SAR:
7202 case Token::SHR:
7203 // Move the right operand into ecx for the shift operation, use eax
7204 // for the smi check register.
7205 ASSERT(!left.is(ecx) && !right.is(ecx));
7206 __ mov(ecx, right);
7207 __ or_(right, Operand(left));
7208 combined = right;
Steve Blocka7e24c12009-10-30 11:49:00 +00007209 break;
7210
7211 default:
Steve Blocka7e24c12009-10-30 11:49:00 +00007212 break;
7213 }
7214
Leon Clarked91b9f72010-01-27 17:25:45 +00007215 // 3. Perform the smi check of the operands.
7216 ASSERT(kSmiTag == 0); // Adjust zero check if not the case.
7217 __ test(combined, Immediate(kSmiTagMask));
7218 __ j(not_zero, &not_smis, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00007219
Leon Clarked91b9f72010-01-27 17:25:45 +00007220 // 4. Operands are both smis, perform the operation leaving the result in
7221 // eax and check the result if necessary.
7222 Comment perform_smi(masm, "-- Perform smi operation");
7223 Label use_fp_on_smis;
Steve Blocka7e24c12009-10-30 11:49:00 +00007224 switch (op_) {
Leon Clarked91b9f72010-01-27 17:25:45 +00007225 case Token::BIT_OR:
7226 // Nothing to do.
7227 break;
7228
7229 case Token::BIT_XOR:
7230 ASSERT(right.is(eax));
7231 __ xor_(right, Operand(left)); // Bitwise xor is commutative.
7232 break;
7233
7234 case Token::BIT_AND:
7235 ASSERT(right.is(eax));
7236 __ and_(right, Operand(left)); // Bitwise and is commutative.
7237 break;
7238
7239 case Token::SHL:
7240 // Remove tags from operands (but keep sign).
7241 __ SmiUntag(left);
7242 __ SmiUntag(ecx);
7243 // Perform the operation.
7244 __ shl_cl(left);
7245 // Check that the *signed* result fits in a smi.
7246 __ cmp(left, 0xc0000000);
7247 __ j(sign, &use_fp_on_smis, not_taken);
7248 // Tag the result and store it in register eax.
7249 __ SmiTag(left);
7250 __ mov(eax, left);
7251 break;
7252
7253 case Token::SAR:
7254 // Remove tags from operands (but keep sign).
7255 __ SmiUntag(left);
7256 __ SmiUntag(ecx);
7257 // Perform the operation.
7258 __ sar_cl(left);
7259 // Tag the result and store it in register eax.
7260 __ SmiTag(left);
7261 __ mov(eax, left);
7262 break;
7263
7264 case Token::SHR:
7265 // Remove tags from operands (but keep sign).
7266 __ SmiUntag(left);
7267 __ SmiUntag(ecx);
7268 // Perform the operation.
7269 __ shr_cl(left);
7270 // Check that the *unsigned* result fits in a smi.
7271 // Neither of the two high-order bits can be set:
7272 // - 0x80000000: high bit would be lost when smi tagging.
7273 // - 0x40000000: this number would convert to negative when
 7274    //   smi tagging.  These two cases can only happen with shifts
7275 // by 0 or 1 when handed a valid smi.
7276 __ test(left, Immediate(0xc0000000));
7277 __ j(not_zero, slow, not_taken);
7278 // Tag the result and store it in register eax.
7279 __ SmiTag(left);
7280 __ mov(eax, left);
7281 break;
7282
Steve Blocka7e24c12009-10-30 11:49:00 +00007283 case Token::ADD:
Leon Clarked91b9f72010-01-27 17:25:45 +00007284 ASSERT(right.is(eax));
7285 __ add(right, Operand(left)); // Addition is commutative.
7286 __ j(overflow, &use_fp_on_smis, not_taken);
7287 break;
7288
Steve Blocka7e24c12009-10-30 11:49:00 +00007289 case Token::SUB:
Leon Clarked91b9f72010-01-27 17:25:45 +00007290 __ sub(left, Operand(right));
7291 __ j(overflow, &use_fp_on_smis, not_taken);
7292 __ mov(eax, left);
Steve Blocka7e24c12009-10-30 11:49:00 +00007293 break;
7294
7295 case Token::MUL:
7296 // If the smi tag is 0 we can just leave the tag on one operand.
Leon Clarked91b9f72010-01-27 17:25:45 +00007297 ASSERT(kSmiTag == 0); // Adjust code below if not the case.
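      // A smi is its value shifted left by one, so multiplying the still
      // tagged left operand by the untagged right operand below yields
      // (left_value * right_value) << 1, which is already the tagged product.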
7298 // We can't revert the multiplication if the result is not a smi
7299 // so save the right operand.
7300 __ mov(ebx, right);
Steve Blocka7e24c12009-10-30 11:49:00 +00007301 // Remove tag from one of the operands (but keep sign).
Leon Clarked91b9f72010-01-27 17:25:45 +00007302 __ SmiUntag(right);
Steve Blocka7e24c12009-10-30 11:49:00 +00007303 // Do multiplication.
Leon Clarked91b9f72010-01-27 17:25:45 +00007304 __ imul(right, Operand(left)); // Multiplication is commutative.
7305 __ j(overflow, &use_fp_on_smis, not_taken);
7306 // Check for negative zero result. Use combined = left | right.
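      // A zero product must be redone in the FP code if either operand was
      // negative, since the result would then be -0, which is not a smi.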
7307 __ NegativeZeroTest(right, combined, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00007308 break;
7309
7310 case Token::DIV:
Leon Clarked91b9f72010-01-27 17:25:45 +00007311 // We can't revert the division if the result is not a smi so
7312 // save the left operand.
7313 __ mov(edi, left);
7314 // Check for 0 divisor.
7315 __ test(right, Operand(right));
7316 __ j(zero, &use_fp_on_smis, not_taken);
7317 // Sign extend left into edx:eax.
7318 ASSERT(left.is(eax));
7319 __ cdq();
7320 // Divide edx:eax by right.
7321 __ idiv(right);
7322 // Check for the corner case of dividing the most negative smi by
 7323 // -1. We cannot use the overflow flag, since it is not set by the
Steve Blocka7e24c12009-10-30 11:49:00 +00007324 // idiv instruction.
Steve Blocka7e24c12009-10-30 11:49:00 +00007325 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
7326 __ cmp(eax, 0x40000000);
Leon Clarked91b9f72010-01-27 17:25:45 +00007327 __ j(equal, &use_fp_on_smis);
7328 // Check for negative zero result. Use combined = left | right.
7329 __ NegativeZeroTest(eax, combined, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00007330 // Check that the remainder is zero.
7331 __ test(edx, Operand(edx));
Leon Clarked91b9f72010-01-27 17:25:45 +00007332 __ j(not_zero, &use_fp_on_smis);
Steve Blocka7e24c12009-10-30 11:49:00 +00007333 // Tag the result and store it in register eax.
Leon Clarkee46be812010-01-19 14:06:41 +00007334 __ SmiTag(eax);
Steve Blocka7e24c12009-10-30 11:49:00 +00007335 break;
7336
7337 case Token::MOD:
Leon Clarked91b9f72010-01-27 17:25:45 +00007338 // Check for 0 divisor.
7339 __ test(right, Operand(right));
7340 __ j(zero, &not_smis, not_taken);
7341
7342 // Sign extend left into edx:eax.
7343 ASSERT(left.is(eax));
7344 __ cdq();
7345 // Divide edx:eax by right.
7346 __ idiv(right);
7347 // Check for negative zero result. Use combined = left | right.
7348 __ NegativeZeroTest(edx, combined, slow);
Steve Blocka7e24c12009-10-30 11:49:00 +00007349 // Move remainder to register eax.
Leon Clarked91b9f72010-01-27 17:25:45 +00007350 __ mov(eax, edx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007351 break;
7352
7353 default:
7354 UNREACHABLE();
Leon Clarked91b9f72010-01-27 17:25:45 +00007355 }
7356
7357 // 5. Emit return of result in eax.
7358 GenerateReturn(masm);
7359
7360 // 6. For some operations emit inline code to perform floating point
7361 // operations on known smis (e.g., if the result of the operation
7362 // overflowed the smi range).
7363 switch (op_) {
7364 case Token::SHL: {
7365 Comment perform_float(masm, "-- Perform float operation on smis");
7366 __ bind(&use_fp_on_smis);
7367 // Result we want is in left == edx, so we can put the allocated heap
7368 // number in eax.
7369 __ AllocateHeapNumber(eax, ecx, ebx, slow);
7370 // Store the result in the HeapNumber and return.
7371 if (CpuFeatures::IsSupported(SSE2)) {
7372 CpuFeatures::Scope use_sse2(SSE2);
7373 __ cvtsi2sd(xmm0, Operand(left));
7374 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
7375 } else {
7376 // It's OK to overwrite the right argument on the stack because we
7377 // are about to return.
7378 __ mov(Operand(esp, 1 * kPointerSize), left);
7379 __ fild_s(Operand(esp, 1 * kPointerSize));
7380 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
7381 }
7382 GenerateReturn(masm);
7383 break;
7384 }
7385
7386 case Token::ADD:
7387 case Token::SUB:
7388 case Token::MUL:
7389 case Token::DIV: {
7390 Comment perform_float(masm, "-- Perform float operation on smis");
7391 __ bind(&use_fp_on_smis);
7392 // Restore arguments to edx, eax.
7393 switch (op_) {
7394 case Token::ADD:
7395 // Revert right = right + left.
7396 __ sub(right, Operand(left));
7397 break;
7398 case Token::SUB:
7399 // Revert left = left - right.
7400 __ add(left, Operand(right));
7401 break;
7402 case Token::MUL:
7403 // Right was clobbered but a copy is in ebx.
7404 __ mov(right, ebx);
7405 break;
7406 case Token::DIV:
7407 // Left was clobbered but a copy is in edi. Right is in ebx for
7408 // division.
7409 __ mov(edx, edi);
7410 __ mov(eax, right);
7411 break;
7412 default: UNREACHABLE();
7413 break;
7414 }
7415 __ AllocateHeapNumber(ecx, ebx, no_reg, slow);
7416 if (CpuFeatures::IsSupported(SSE2)) {
7417 CpuFeatures::Scope use_sse2(SSE2);
7418 FloatingPointHelper::LoadSSE2Smis(masm, ebx);
7419 switch (op_) {
7420 case Token::ADD: __ addsd(xmm0, xmm1); break;
7421 case Token::SUB: __ subsd(xmm0, xmm1); break;
7422 case Token::MUL: __ mulsd(xmm0, xmm1); break;
7423 case Token::DIV: __ divsd(xmm0, xmm1); break;
7424 default: UNREACHABLE();
7425 }
7426 __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
7427 } else { // SSE2 not available, use FPU.
7428 FloatingPointHelper::LoadFloatSmis(masm, ebx);
7429 switch (op_) {
7430 case Token::ADD: __ faddp(1); break;
7431 case Token::SUB: __ fsubp(1); break;
7432 case Token::MUL: __ fmulp(1); break;
7433 case Token::DIV: __ fdivp(1); break;
7434 default: UNREACHABLE();
7435 }
7436 __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
7437 }
7438 __ mov(eax, ecx);
7439 GenerateReturn(masm);
7440 break;
7441 }
7442
7443 default:
7444 break;
7445 }
7446
7447 // 7. Non-smi operands, fall out to the non-smi code with the operands in
7448 // edx and eax.
7449 Comment done_comment(masm, "-- Enter non-smi code");
7450 __ bind(&not_smis);
7451 switch (op_) {
7452 case Token::BIT_OR:
7453 case Token::SHL:
7454 case Token::SAR:
7455 case Token::SHR:
7456 // Right operand is saved in ecx and eax was destroyed by the smi
7457 // check.
7458 __ mov(eax, ecx);
7459 break;
7460
7461 case Token::DIV:
7462 case Token::MOD:
7463 // Operands are in eax, ebx at this point.
7464 __ mov(edx, eax);
7465 __ mov(eax, ebx);
7466 break;
7467
7468 default:
Steve Blocka7e24c12009-10-30 11:49:00 +00007469 break;
7470 }
7471}
7472
7473
7474void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
7475 Label call_runtime;
7476
Steve Block3ce2e202009-11-05 08:53:23 +00007477 __ IncrementCounter(&Counters::generic_binary_stub_calls, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00007478
Steve Block3ce2e202009-11-05 08:53:23 +00007479 // Generate fast case smi code if requested. This flag is set when the fast
7480 // case smi code is not generated by the caller. Generating it here will speed
7481 // up common operations.
7482 if (HasSmiCodeInStub()) {
Leon Clarked91b9f72010-01-27 17:25:45 +00007483 GenerateSmiCode(masm, &call_runtime);
7484 } else if (op_ != Token::MOD) { // MOD goes straight to runtime.
7485 GenerateLoadArguments(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007486 }
7487
Steve Blocka7e24c12009-10-30 11:49:00 +00007488 // Floating point case.
7489 switch (op_) {
7490 case Token::ADD:
7491 case Token::SUB:
7492 case Token::MUL:
7493 case Token::DIV: {
Steve Blockd0582a62009-12-15 09:54:21 +00007494 if (CpuFeatures::IsSupported(SSE2)) {
7495 CpuFeatures::Scope use_sse2(SSE2);
Leon Clarked91b9f72010-01-27 17:25:45 +00007496 FloatingPointHelper::LoadSSE2Operands(masm, &call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007497
7498 switch (op_) {
7499 case Token::ADD: __ addsd(xmm0, xmm1); break;
7500 case Token::SUB: __ subsd(xmm0, xmm1); break;
7501 case Token::MUL: __ mulsd(xmm0, xmm1); break;
7502 case Token::DIV: __ divsd(xmm0, xmm1); break;
7503 default: UNREACHABLE();
7504 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007505 GenerateHeapResultAllocation(masm, &call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007506 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
Steve Block3ce2e202009-11-05 08:53:23 +00007507 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007508 } else { // SSE2 not available, use FPU.
7509 FloatingPointHelper::CheckFloatOperands(masm, &call_runtime, ebx);
Leon Clarked91b9f72010-01-27 17:25:45 +00007510 FloatingPointHelper::LoadFloatOperands(
7511 masm,
7512 ecx,
7513 FloatingPointHelper::ARGS_IN_REGISTERS);
Steve Blocka7e24c12009-10-30 11:49:00 +00007514 switch (op_) {
7515 case Token::ADD: __ faddp(1); break;
7516 case Token::SUB: __ fsubp(1); break;
7517 case Token::MUL: __ fmulp(1); break;
7518 case Token::DIV: __ fdivp(1); break;
7519 default: UNREACHABLE();
7520 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007521 Label after_alloc_failure;
7522 GenerateHeapResultAllocation(masm, &after_alloc_failure);
Steve Blocka7e24c12009-10-30 11:49:00 +00007523 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
Steve Block3ce2e202009-11-05 08:53:23 +00007524 GenerateReturn(masm);
Leon Clarked91b9f72010-01-27 17:25:45 +00007525 __ bind(&after_alloc_failure);
7526 __ ffree();
7527 __ jmp(&call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007528 }
7529 }
7530 case Token::MOD: {
7531 // For MOD we go directly to runtime in the non-smi case.
7532 break;
7533 }
7534 case Token::BIT_OR:
7535 case Token::BIT_AND:
7536 case Token::BIT_XOR:
7537 case Token::SAR:
7538 case Token::SHL:
7539 case Token::SHR: {
Leon Clarked91b9f72010-01-27 17:25:45 +00007540 Label non_smi_result;
7541 FloatingPointHelper::LoadAsIntegers(masm, use_sse3_, &call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007542 switch (op_) {
7543 case Token::BIT_OR: __ or_(eax, Operand(ecx)); break;
7544 case Token::BIT_AND: __ and_(eax, Operand(ecx)); break;
7545 case Token::BIT_XOR: __ xor_(eax, Operand(ecx)); break;
Steve Blockd0582a62009-12-15 09:54:21 +00007546 case Token::SAR: __ sar_cl(eax); break;
7547 case Token::SHL: __ shl_cl(eax); break;
7548 case Token::SHR: __ shr_cl(eax); break;
Steve Blocka7e24c12009-10-30 11:49:00 +00007549 default: UNREACHABLE();
7550 }
7551 if (op_ == Token::SHR) {
7552 // Check if result is non-negative and fits in a smi.
7553 __ test(eax, Immediate(0xc0000000));
Leon Clarked91b9f72010-01-27 17:25:45 +00007554 __ j(not_zero, &call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007555 } else {
7556 // Check if result fits in a smi.
7557 __ cmp(eax, 0xc0000000);
7558 __ j(negative, &non_smi_result);
7559 }
7560 // Tag smi result and return.
Leon Clarkee46be812010-01-19 14:06:41 +00007561 __ SmiTag(eax);
Steve Blockd0582a62009-12-15 09:54:21 +00007562 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007563
7564 // All ops except SHR return a signed int32 that we load in a HeapNumber.
7565 if (op_ != Token::SHR) {
7566 __ bind(&non_smi_result);
7567 // Allocate a heap number if needed.
7568 __ mov(ebx, Operand(eax)); // ebx: result
Leon Clarked91b9f72010-01-27 17:25:45 +00007569 Label skip_allocation;
Steve Blocka7e24c12009-10-30 11:49:00 +00007570 switch (mode_) {
7571 case OVERWRITE_LEFT:
7572 case OVERWRITE_RIGHT:
7573 // If the operand was an object, we skip the
7574 // allocation of a heap number.
7575 __ mov(eax, Operand(esp, mode_ == OVERWRITE_RIGHT ?
7576 1 * kPointerSize : 2 * kPointerSize));
7577 __ test(eax, Immediate(kSmiTagMask));
7578 __ j(not_zero, &skip_allocation, not_taken);
7579 // Fall through!
7580 case NO_OVERWRITE:
Steve Block3ce2e202009-11-05 08:53:23 +00007581 __ AllocateHeapNumber(eax, ecx, edx, &call_runtime);
Steve Blocka7e24c12009-10-30 11:49:00 +00007582 __ bind(&skip_allocation);
7583 break;
7584 default: UNREACHABLE();
7585 }
7586 // Store the result in the HeapNumber and return.
Leon Clarkee46be812010-01-19 14:06:41 +00007587 if (CpuFeatures::IsSupported(SSE2)) {
7588 CpuFeatures::Scope use_sse2(SSE2);
7589 __ cvtsi2sd(xmm0, Operand(ebx));
7590 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
7591 } else {
7592 __ mov(Operand(esp, 1 * kPointerSize), ebx);
7593 __ fild_s(Operand(esp, 1 * kPointerSize));
7594 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
7595 }
Steve Blockd0582a62009-12-15 09:54:21 +00007596 GenerateReturn(masm);
Steve Blocka7e24c12009-10-30 11:49:00 +00007597 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007598 break;
7599 }
7600 default: UNREACHABLE(); break;
7601 }
7602
7603 // If all else fails, use the runtime system to get the correct
Steve Block3ce2e202009-11-05 08:53:23 +00007604 // result. If the arguments were passed in registers, now place them on the
Steve Blockd0582a62009-12-15 09:54:21 +00007605 // stack in the correct order below the return address.
Steve Blocka7e24c12009-10-30 11:49:00 +00007606 __ bind(&call_runtime);
Leon Clarked91b9f72010-01-27 17:25:45 +00007607 if (HasArgsInRegisters()) {
Steve Block3ce2e202009-11-05 08:53:23 +00007608 __ pop(ecx);
Leon Clarked91b9f72010-01-27 17:25:45 +00007609 if (HasArgsReversed()) {
Steve Block3ce2e202009-11-05 08:53:23 +00007610 __ push(eax);
7611 __ push(edx);
7612 } else {
7613 __ push(edx);
7614 __ push(eax);
7615 }
7616 __ push(ecx);
7617 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007618 switch (op_) {
7619 case Token::ADD: {
7620 // Test for string arguments before calling runtime.
Steve Blockd0582a62009-12-15 09:54:21 +00007621 Label not_strings, not_string1, string1;
Steve Blocka7e24c12009-10-30 11:49:00 +00007622 Result answer;
Leon Clarked91b9f72010-01-27 17:25:45 +00007623 __ test(edx, Immediate(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00007624 __ j(zero, &not_string1);
Leon Clarked91b9f72010-01-27 17:25:45 +00007625 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007626 __ j(above_equal, &not_string1);
7627
Leon Clarked91b9f72010-01-27 17:25:45 +00007628 // First argument is a string, test second.
7629 __ test(eax, Immediate(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00007630 __ j(zero, &string1);
Leon Clarked91b9f72010-01-27 17:25:45 +00007631 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007632 __ j(above_equal, &string1);
7633
Steve Blockd0582a62009-12-15 09:54:21 +00007634 // First and second argument are strings. Jump to the string add stub.
7635 StringAddStub stub(NO_STRING_CHECK_IN_STUB);
7636 __ TailCallStub(&stub);
Steve Blocka7e24c12009-10-30 11:49:00 +00007637
7638 // Only first argument is a string.
7639 __ bind(&string1);
Leon Clarked91b9f72010-01-27 17:25:45 +00007640 __ InvokeBuiltin(
7641 HasArgsReversed() ?
7642 Builtins::STRING_ADD_RIGHT :
7643 Builtins::STRING_ADD_LEFT,
7644 JUMP_FUNCTION);
Steve Blocka7e24c12009-10-30 11:49:00 +00007645
7646 // First argument was not a string, test second.
7647 __ bind(&not_string1);
Leon Clarked91b9f72010-01-27 17:25:45 +00007648 __ test(eax, Immediate(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00007649 __ j(zero, &not_strings);
Leon Clarked91b9f72010-01-27 17:25:45 +00007650 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00007651 __ j(above_equal, &not_strings);
7652
7653 // Only second argument is a string.
Leon Clarked91b9f72010-01-27 17:25:45 +00007654 __ InvokeBuiltin(
7655 HasArgsReversed() ?
7656 Builtins::STRING_ADD_LEFT :
7657 Builtins::STRING_ADD_RIGHT,
7658 JUMP_FUNCTION);
Steve Blocka7e24c12009-10-30 11:49:00 +00007659
7660 __ bind(&not_strings);
7661 // Neither argument is a string.
7662 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
7663 break;
7664 }
7665 case Token::SUB:
7666 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
7667 break;
7668 case Token::MUL:
7669 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
Leon Clarked91b9f72010-01-27 17:25:45 +00007670 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00007671 case Token::DIV:
7672 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
7673 break;
7674 case Token::MOD:
7675 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
7676 break;
7677 case Token::BIT_OR:
7678 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
7679 break;
7680 case Token::BIT_AND:
7681 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION);
7682 break;
7683 case Token::BIT_XOR:
7684 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION);
7685 break;
7686 case Token::SAR:
7687 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION);
7688 break;
7689 case Token::SHL:
7690 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
7691 break;
7692 case Token::SHR:
7693 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION);
7694 break;
7695 default:
7696 UNREACHABLE();
7697 }
7698}
7699
7700
Leon Clarked91b9f72010-01-27 17:25:45 +00007701void GenericBinaryOpStub::GenerateHeapResultAllocation(MacroAssembler* masm,
7702 Label* alloc_failure) {
7703 Label skip_allocation;
7704 OverwriteMode mode = mode_;
7705 if (HasArgsReversed()) {
7706 if (mode == OVERWRITE_RIGHT) {
7707 mode = OVERWRITE_LEFT;
7708 } else if (mode == OVERWRITE_LEFT) {
7709 mode = OVERWRITE_RIGHT;
7710 }
7711 }
7712 switch (mode) {
7713 case OVERWRITE_LEFT: {
7714 // If the argument in edx is already an object, we skip the
7715 // allocation of a heap number.
7716 __ test(edx, Immediate(kSmiTagMask));
7717 __ j(not_zero, &skip_allocation, not_taken);
7718 // Allocate a heap number for the result. Keep eax and edx intact
7719 // for the possible runtime call.
7720 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
7721 // Now edx can be overwritten losing one of the arguments as we are
7722 // now done and will not need it any more.
7723 __ mov(edx, Operand(ebx));
7724 __ bind(&skip_allocation);
7725 // Use object in edx as a result holder
7726 __ mov(eax, Operand(edx));
7727 break;
7728 }
7729 case OVERWRITE_RIGHT:
7730 // If the argument in eax is already an object, we skip the
7731 // allocation of a heap number.
7732 __ test(eax, Immediate(kSmiTagMask));
7733 __ j(not_zero, &skip_allocation, not_taken);
7734 // Fall through!
7735 case NO_OVERWRITE:
7736 // Allocate a heap number for the result. Keep eax and edx intact
7737 // for the possible runtime call.
7738 __ AllocateHeapNumber(ebx, ecx, no_reg, alloc_failure);
7739 // Now eax can be overwritten losing one of the arguments as we are
7740 // now done and will not need it any more.
7741 __ mov(eax, ebx);
7742 __ bind(&skip_allocation);
7743 break;
7744 default: UNREACHABLE();
7745 }
7746}
7747
7748
Steve Block3ce2e202009-11-05 08:53:23 +00007749void GenericBinaryOpStub::GenerateLoadArguments(MacroAssembler* masm) {
 7750 // If arguments are not passed in registers, read them from the stack.
Leon Clarked91b9f72010-01-27 17:25:45 +00007751 if (!HasArgsInRegisters()) {
Steve Block3ce2e202009-11-05 08:53:23 +00007752 __ mov(eax, Operand(esp, 1 * kPointerSize));
7753 __ mov(edx, Operand(esp, 2 * kPointerSize));
7754 }
7755}
Steve Blocka7e24c12009-10-30 11:49:00 +00007756
Steve Block3ce2e202009-11-05 08:53:23 +00007757
7758void GenericBinaryOpStub::GenerateReturn(MacroAssembler* masm) {
 7759 // If arguments are not passed in registers, remove them from the stack before
7760 // returning.
Leon Clarked91b9f72010-01-27 17:25:45 +00007761 if (!HasArgsInRegisters()) {
Steve Block3ce2e202009-11-05 08:53:23 +00007762 __ ret(2 * kPointerSize); // Remove both operands
7763 } else {
7764 __ ret(0);
7765 }
Steve Blocka7e24c12009-10-30 11:49:00 +00007766}
7767
7768
Leon Clarkee46be812010-01-19 14:06:41 +00007769// Get the integer part of a heap number. Surprisingly, all this bit twiddling
7770// is faster than using the built-in instructions on floating point registers.
7771// Trashes edi and ebx. Dest is ecx. Source cannot be ecx or one of the
7772// trashed registers.
7773void IntegerConvert(MacroAssembler* masm,
7774 Register source,
7775 bool use_sse3,
7776 Label* conversion_failure) {
Leon Clarked91b9f72010-01-27 17:25:45 +00007777 ASSERT(!source.is(ecx) && !source.is(edi) && !source.is(ebx));
Leon Clarkee46be812010-01-19 14:06:41 +00007778 Label done, right_exponent, normal_exponent;
7779 Register scratch = ebx;
7780 Register scratch2 = edi;
7781 // Get exponent word.
7782 __ mov(scratch, FieldOperand(source, HeapNumber::kExponentOffset));
7783 // Get exponent alone in scratch2.
7784 __ mov(scratch2, scratch);
7785 __ and_(scratch2, HeapNumber::kExponentMask);
7786 if (use_sse3) {
7787 CpuFeatures::Scope scope(SSE3);
7788 // Check whether the exponent is too big for a 64 bit signed integer.
7789 static const uint32_t kTooBigExponent =
7790 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
7791 __ cmp(Operand(scratch2), Immediate(kTooBigExponent));
7792 __ j(greater_equal, conversion_failure);
7793 // Load x87 register with heap number.
7794 __ fld_d(FieldOperand(source, HeapNumber::kValueOffset));
7795 // Reserve space for 64 bit answer.
7796 __ sub(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
7797 // Do conversion, which cannot fail because we checked the exponent.
7798 __ fisttp_d(Operand(esp, 0));
7799 __ mov(ecx, Operand(esp, 0)); // Load low word of answer into ecx.
7800 __ add(Operand(esp), Immediate(sizeof(uint64_t))); // Nolint.
7801 } else {
7802 // Load ecx with zero. We use this either for the final shift or
7803 // for the answer.
7804 __ xor_(ecx, Operand(ecx));
7805 // Check whether the exponent matches a 32 bit signed int that cannot be
7806 // represented by a Smi. A non-smi 32 bit integer is 1.xxx * 2^30 so the
7807 // exponent is 30 (biased). This is the exponent that we are fastest at and
7808 // also the highest exponent we can handle here.
7809 const uint32_t non_smi_exponent =
7810 (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
7811 __ cmp(Operand(scratch2), Immediate(non_smi_exponent));
7812 // If we have a match of the int32-but-not-Smi exponent then skip some
7813 // logic.
7814 __ j(equal, &right_exponent);
7815 // If the exponent is higher than that then go to slow case. This catches
7816 // numbers that don't fit in a signed int32, infinities and NaNs.
7817 __ j(less, &normal_exponent);
7818
7819 {
7820 // Handle a big exponent. The only reason we have this code is that the
7821 // >>> operator has a tendency to generate numbers with an exponent of 31.
7822 const uint32_t big_non_smi_exponent =
7823 (HeapNumber::kExponentBias + 31) << HeapNumber::kExponentShift;
7824 __ cmp(Operand(scratch2), Immediate(big_non_smi_exponent));
7825 __ j(not_equal, conversion_failure);
7826 // We have the big exponent, typically from >>>. This means the number is
7827 // in the range 2^31 to 2^32 - 1. Get the top bits of the mantissa.
7828 __ mov(scratch2, scratch);
7829 __ and_(scratch2, HeapNumber::kMantissaMask);
7830 // Put back the implicit 1.
7831 __ or_(scratch2, 1 << HeapNumber::kExponentShift);
7832 // Shift up the mantissa bits to take up the space the exponent used to
 7833 // take. We just or'ed in the implicit bit, so that took care of one, and
 7834 // we want to use the full unsigned range, so we subtract 1 bit from the
 7835 // shift distance.
7836 const int big_shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 1;
7837 __ shl(scratch2, big_shift_distance);
7838 // Get the second half of the double.
7839 __ mov(ecx, FieldOperand(source, HeapNumber::kMantissaOffset));
 7840 // Shift down 21 bits to get the most significant 11 bits of the low
7841 // mantissa word.
7842 __ shr(ecx, 32 - big_shift_distance);
7843 __ or_(ecx, Operand(scratch2));
7844 // We have the answer in ecx, but we may need to negate it.
7845 __ test(scratch, Operand(scratch));
7846 __ j(positive, &done);
7847 __ neg(ecx);
7848 __ jmp(&done);
7849 }
7850
7851 __ bind(&normal_exponent);
7852 // Exponent word in scratch, exponent part of exponent word in scratch2.
7853 // Zero in ecx.
7854 // We know the exponent is smaller than 30 (biased). If it is less than
 7855 // 0 (biased), then the number is smaller in magnitude than 1.0 * 2^0, i.e.
7856 // it rounds to zero.
7857 const uint32_t zero_exponent =
7858 (HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
7859 __ sub(Operand(scratch2), Immediate(zero_exponent));
7860 // ecx already has a Smi zero.
7861 __ j(less, &done);
7862
7863 // We have a shifted exponent between 0 and 30 in scratch2.
7864 __ shr(scratch2, HeapNumber::kExponentShift);
7865 __ mov(ecx, Immediate(30));
7866 __ sub(ecx, Operand(scratch2));
7867
7868 __ bind(&right_exponent);
7869 // Here ecx is the shift, scratch is the exponent word.
7870 // Get the top bits of the mantissa.
7871 __ and_(scratch, HeapNumber::kMantissaMask);
7872 // Put back the implicit 1.
7873 __ or_(scratch, 1 << HeapNumber::kExponentShift);
7874 // Shift up the mantissa bits to take up the space the exponent used to
 7875 // take. We have kExponentShift + 1 significant bits in the low end of the
7876 // word. Shift them to the top bits.
7877 const int shift_distance = HeapNumber::kNonMantissaBitsInTopWord - 2;
7878 __ shl(scratch, shift_distance);
7879 // Get the second half of the double. For some exponents we don't
7880 // actually need this because the bits get shifted out again, but
7881 // it's probably slower to test than just to do it.
7882 __ mov(scratch2, FieldOperand(source, HeapNumber::kMantissaOffset));
 7883 // Shift down 22 bits to get the most significant 10 bits of the low
7884 // mantissa word.
7885 __ shr(scratch2, 32 - shift_distance);
7886 __ or_(scratch2, Operand(scratch));
7887 // Move down according to the exponent.
7888 __ shr_cl(scratch2);
7889 // Now the unsigned answer is in scratch2. We need to move it to ecx and
7890 // we may need to fix the sign.
7891 Label negative;
7892 __ xor_(ecx, Operand(ecx));
7893 __ cmp(ecx, FieldOperand(source, HeapNumber::kExponentOffset));
7894 __ j(greater, &negative);
7895 __ mov(ecx, scratch2);
7896 __ jmp(&done);
7897 __ bind(&negative);
7898 __ sub(ecx, Operand(scratch2));
7899 __ bind(&done);
7900 }
7901}
7902
7903
7904// Input: edx, eax are the left and right objects of a bit op.
7905// Output: eax, ecx are left and right integers for a bit op.
7906void FloatingPointHelper::LoadAsIntegers(MacroAssembler* masm,
7907 bool use_sse3,
7908 Label* conversion_failure) {
7909 // Check float operands.
7910 Label arg1_is_object, check_undefined_arg1;
7911 Label arg2_is_object, check_undefined_arg2;
7912 Label load_arg2, done;
7913
7914 __ test(edx, Immediate(kSmiTagMask));
7915 __ j(not_zero, &arg1_is_object);
7916 __ SmiUntag(edx);
7917 __ jmp(&load_arg2);
7918
7919 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
7920 __ bind(&check_undefined_arg1);
7921 __ cmp(edx, Factory::undefined_value());
7922 __ j(not_equal, conversion_failure);
7923 __ mov(edx, Immediate(0));
7924 __ jmp(&load_arg2);
7925
7926 __ bind(&arg1_is_object);
7927 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
7928 __ cmp(ebx, Factory::heap_number_map());
7929 __ j(not_equal, &check_undefined_arg1);
7930 // Get the untagged integer version of the edx heap number in ecx.
7931 IntegerConvert(masm, edx, use_sse3, conversion_failure);
7932 __ mov(edx, ecx);
7933
7934 // Here edx has the untagged integer, eax has a Smi or a heap number.
7935 __ bind(&load_arg2);
7936 // Test if arg2 is a Smi.
7937 __ test(eax, Immediate(kSmiTagMask));
7938 __ j(not_zero, &arg2_is_object);
7939 __ SmiUntag(eax);
7940 __ mov(ecx, eax);
7941 __ jmp(&done);
7942
7943 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
7944 __ bind(&check_undefined_arg2);
7945 __ cmp(eax, Factory::undefined_value());
7946 __ j(not_equal, conversion_failure);
7947 __ mov(ecx, Immediate(0));
7948 __ jmp(&done);
7949
7950 __ bind(&arg2_is_object);
7951 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
7952 __ cmp(ebx, Factory::heap_number_map());
7953 __ j(not_equal, &check_undefined_arg2);
7954 // Get the untagged integer version of the eax heap number in ecx.
7955 IntegerConvert(masm, eax, use_sse3, conversion_failure);
7956 __ bind(&done);
7957 __ mov(eax, edx);
7958}
7959
7960
Steve Blocka7e24c12009-10-30 11:49:00 +00007961void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
7962 Register number) {
7963 Label load_smi, done;
7964
7965 __ test(number, Immediate(kSmiTagMask));
7966 __ j(zero, &load_smi, not_taken);
7967 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
7968 __ jmp(&done);
7969
7970 __ bind(&load_smi);
Leon Clarkee46be812010-01-19 14:06:41 +00007971 __ SmiUntag(number);
Steve Blocka7e24c12009-10-30 11:49:00 +00007972 __ push(number);
7973 __ fild_s(Operand(esp, 0));
7974 __ pop(number);
7975
7976 __ bind(&done);
7977}
7978
7979
Leon Clarked91b9f72010-01-27 17:25:45 +00007980void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
Steve Blocka7e24c12009-10-30 11:49:00 +00007981 Label* not_numbers) {
7982 Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
7983 // Load operand in edx into xmm0, or branch to not_numbers.
7984 __ test(edx, Immediate(kSmiTagMask));
7985 __ j(zero, &load_smi_edx, not_taken); // Argument in edx is a smi.
7986 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), Factory::heap_number_map());
7987 __ j(not_equal, not_numbers); // Argument in edx is not a number.
7988 __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
7989 __ bind(&load_eax);
7990 // Load operand in eax into xmm1, or branch to not_numbers.
7991 __ test(eax, Immediate(kSmiTagMask));
7992 __ j(zero, &load_smi_eax, not_taken); // Argument in eax is a smi.
7993 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), Factory::heap_number_map());
7994 __ j(equal, &load_float_eax);
7995 __ jmp(not_numbers); // Argument in eax is not a number.
7996 __ bind(&load_smi_edx);
Leon Clarkee46be812010-01-19 14:06:41 +00007997 __ SmiUntag(edx); // Untag smi before converting to float.
Steve Blocka7e24c12009-10-30 11:49:00 +00007998 __ cvtsi2sd(xmm0, Operand(edx));
Leon Clarkee46be812010-01-19 14:06:41 +00007999 __ SmiTag(edx); // Retag smi for heap number overwriting test.
Steve Blocka7e24c12009-10-30 11:49:00 +00008000 __ jmp(&load_eax);
8001 __ bind(&load_smi_eax);
Leon Clarkee46be812010-01-19 14:06:41 +00008002 __ SmiUntag(eax); // Untag smi before converting to float.
Steve Blocka7e24c12009-10-30 11:49:00 +00008003 __ cvtsi2sd(xmm1, Operand(eax));
Leon Clarkee46be812010-01-19 14:06:41 +00008004 __ SmiTag(eax); // Retag smi for heap number overwriting test.
Steve Blocka7e24c12009-10-30 11:49:00 +00008005 __ jmp(&done);
8006 __ bind(&load_float_eax);
8007 __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
8008 __ bind(&done);
8009}
8010
8011
Leon Clarked91b9f72010-01-27 17:25:45 +00008012void FloatingPointHelper::LoadSSE2Smis(MacroAssembler* masm,
8013 Register scratch) {
8014 const Register left = edx;
8015 const Register right = eax;
8016 __ mov(scratch, left);
8017 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
8018 __ SmiUntag(scratch);
8019 __ cvtsi2sd(xmm0, Operand(scratch));
8020
8021 __ mov(scratch, right);
8022 __ SmiUntag(scratch);
8023 __ cvtsi2sd(xmm1, Operand(scratch));
8024}
8025
8026
Steve Blocka7e24c12009-10-30 11:49:00 +00008027void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
Leon Clarked91b9f72010-01-27 17:25:45 +00008028 Register scratch,
8029 ArgLocation arg_location) {
Steve Blocka7e24c12009-10-30 11:49:00 +00008030 Label load_smi_1, load_smi_2, done_load_1, done;
Leon Clarked91b9f72010-01-27 17:25:45 +00008031 if (arg_location == ARGS_IN_REGISTERS) {
8032 __ mov(scratch, edx);
8033 } else {
8034 __ mov(scratch, Operand(esp, 2 * kPointerSize));
8035 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008036 __ test(scratch, Immediate(kSmiTagMask));
8037 __ j(zero, &load_smi_1, not_taken);
8038 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
8039 __ bind(&done_load_1);
8040
Leon Clarked91b9f72010-01-27 17:25:45 +00008041 if (arg_location == ARGS_IN_REGISTERS) {
8042 __ mov(scratch, eax);
8043 } else {
8044 __ mov(scratch, Operand(esp, 1 * kPointerSize));
8045 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008046 __ test(scratch, Immediate(kSmiTagMask));
8047 __ j(zero, &load_smi_2, not_taken);
8048 __ fld_d(FieldOperand(scratch, HeapNumber::kValueOffset));
8049 __ jmp(&done);
8050
8051 __ bind(&load_smi_1);
Leon Clarkee46be812010-01-19 14:06:41 +00008052 __ SmiUntag(scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00008053 __ push(scratch);
8054 __ fild_s(Operand(esp, 0));
8055 __ pop(scratch);
8056 __ jmp(&done_load_1);
8057
8058 __ bind(&load_smi_2);
Leon Clarkee46be812010-01-19 14:06:41 +00008059 __ SmiUntag(scratch);
Steve Blocka7e24c12009-10-30 11:49:00 +00008060 __ push(scratch);
8061 __ fild_s(Operand(esp, 0));
8062 __ pop(scratch);
8063
8064 __ bind(&done);
8065}
8066
8067
Leon Clarked91b9f72010-01-27 17:25:45 +00008068void FloatingPointHelper::LoadFloatSmis(MacroAssembler* masm,
8069 Register scratch) {
8070 const Register left = edx;
8071 const Register right = eax;
8072 __ mov(scratch, left);
8073 ASSERT(!scratch.is(right)); // We're about to clobber scratch.
8074 __ SmiUntag(scratch);
8075 __ push(scratch);
8076 __ fild_s(Operand(esp, 0));
8077
8078 __ mov(scratch, right);
8079 __ SmiUntag(scratch);
8080 __ mov(Operand(esp, 0), scratch);
8081 __ fild_s(Operand(esp, 0));
8082 __ pop(scratch);
8083}
8084
8085
Steve Blocka7e24c12009-10-30 11:49:00 +00008086void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
8087 Label* non_float,
8088 Register scratch) {
8089 Label test_other, done;
8090 // Test if both operands are floats or smi -> scratch=k_is_float;
8091 // Otherwise scratch = k_not_float.
8092 __ test(edx, Immediate(kSmiTagMask));
8093 __ j(zero, &test_other, not_taken); // argument in edx is OK
8094 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
8095 __ cmp(scratch, Factory::heap_number_map());
8096 __ j(not_equal, non_float); // argument in edx is not a number -> NaN
8097
8098 __ bind(&test_other);
8099 __ test(eax, Immediate(kSmiTagMask));
8100 __ j(zero, &done); // argument in eax is OK
8101 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
8102 __ cmp(scratch, Factory::heap_number_map());
8103 __ j(not_equal, non_float); // argument in eax is not a number -> NaN
8104
8105 // Fall-through: Both operands are numbers.
8106 __ bind(&done);
8107}
8108
8109
Leon Clarkee46be812010-01-19 14:06:41 +00008110void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
8111 Label slow, done;
Steve Blocka7e24c12009-10-30 11:49:00 +00008112
Leon Clarkee46be812010-01-19 14:06:41 +00008113 if (op_ == Token::SUB) {
8114 // Check whether the value is a smi.
8115 Label try_float;
8116 __ test(eax, Immediate(kSmiTagMask));
8117 __ j(not_zero, &try_float, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00008118
Leon Clarkee46be812010-01-19 14:06:41 +00008119 // Go slow case if the value of the expression is zero
8120 // to make sure that we switch between 0 and -0.
8121 __ test(eax, Operand(eax));
8122 __ j(zero, &slow, not_taken);
Steve Blocka7e24c12009-10-30 11:49:00 +00008123
Leon Clarkee46be812010-01-19 14:06:41 +00008124 // The value of the expression is a smi that is not zero. Try
8125 // optimistic subtraction '0 - value'.
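    // Because the smi tag is 0, negating the tagged value directly yields the
    // tagged negation; the only case that overflows is the most negative smi.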
8126 Label undo;
Steve Blocka7e24c12009-10-30 11:49:00 +00008127 __ mov(edx, Operand(eax));
Leon Clarkee46be812010-01-19 14:06:41 +00008128 __ Set(eax, Immediate(0));
8129 __ sub(eax, Operand(edx));
8130 __ j(overflow, &undo, not_taken);
8131
8132 // If result is a smi we are done.
8133 __ test(eax, Immediate(kSmiTagMask));
8134 __ j(zero, &done, taken);
8135
8136 // Restore eax and go slow case.
8137 __ bind(&undo);
8138 __ mov(eax, Operand(edx));
8139 __ jmp(&slow);
8140
8141 // Try floating point case.
8142 __ bind(&try_float);
8143 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
8144 __ cmp(edx, Factory::heap_number_map());
8145 __ j(not_equal, &slow);
8146 if (overwrite_) {
8147 __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
8148 __ xor_(edx, HeapNumber::kSignMask); // Flip sign.
8149 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
8150 } else {
8151 __ mov(edx, Operand(eax));
8152 // edx: operand
8153 __ AllocateHeapNumber(eax, ebx, ecx, &undo);
8154 // eax: allocated 'empty' number
8155 __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
8156 __ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
8157 __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
8158 __ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
8159 __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
8160 }
8161 } else if (op_ == Token::BIT_NOT) {
8162 // Check if the operand is a heap number.
8163 __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
8164 __ cmp(edx, Factory::heap_number_map());
8165 __ j(not_equal, &slow, not_taken);
8166
8167 // Convert the heap number in eax to an untagged integer in ecx.
8168 IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), &slow);
8169
8170 // Do the bitwise operation and check if the result fits in a smi.
8171 Label try_float;
8172 __ not_(ecx);
8173 __ cmp(ecx, 0xc0000000);
8174 __ j(sign, &try_float, not_taken);
8175
8176 // Tag the result as a smi and we're done.
8177 ASSERT(kSmiTagSize == 1);
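    // lea computes ecx * 2 + kSmiTag (= 0), i.e. the smi tagging of ecx, in a
    // single instruction without clobbering ecx.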
8178 __ lea(eax, Operand(ecx, times_2, kSmiTag));
8179 __ jmp(&done);
8180
8181 // Try to store the result in a heap number.
8182 __ bind(&try_float);
8183 if (!overwrite_) {
8184 // Allocate a fresh heap number, but don't overwrite eax until
8185 // we're sure we can do it without going through the slow case
8186 // that needs the value in eax.
8187 __ AllocateHeapNumber(ebx, edx, edi, &slow);
8188 __ mov(eax, Operand(ebx));
8189 }
8190 if (CpuFeatures::IsSupported(SSE2)) {
8191 CpuFeatures::Scope use_sse2(SSE2);
8192 __ cvtsi2sd(xmm0, Operand(ecx));
8193 __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
8194 } else {
8195 __ push(ecx);
8196 __ fild_s(Operand(esp, 0));
8197 __ pop(ecx);
8198 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
8199 }
8200 } else {
8201 UNIMPLEMENTED();
Steve Blocka7e24c12009-10-30 11:49:00 +00008202 }
8203
Leon Clarkee46be812010-01-19 14:06:41 +00008204 // Return from the stub.
Steve Blocka7e24c12009-10-30 11:49:00 +00008205 __ bind(&done);
Steve Blocka7e24c12009-10-30 11:49:00 +00008206 __ StubReturn(1);
Leon Clarkee46be812010-01-19 14:06:41 +00008207
8208 // Handle the slow case by jumping to the JavaScript builtin.
8209 __ bind(&slow);
8210 __ pop(ecx); // pop return address.
8211 __ push(eax);
8212 __ push(ecx); // push return address
8213 switch (op_) {
8214 case Token::SUB:
8215 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
8216 break;
8217 case Token::BIT_NOT:
8218 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
8219 break;
8220 default:
8221 UNREACHABLE();
8222 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008223}
8224
8225
8226void ArgumentsAccessStub::GenerateReadLength(MacroAssembler* masm) {
8227 // Check if the calling frame is an arguments adaptor frame.
Steve Blocka7e24c12009-10-30 11:49:00 +00008228 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
8229 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
8230 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Steve Blocka7e24c12009-10-30 11:49:00 +00008231
8232 // Arguments adaptor case: Read the arguments length from the
8233 // adaptor frame and return it.
Leon Clarkee46be812010-01-19 14:06:41 +00008234 // Otherwise nothing to do: The number of formal parameters has already been
 8235 // passed in register eax by the calling function. Just return it.
8236 if (CpuFeatures::IsSupported(CMOV)) {
8237 CpuFeatures::Scope use_cmov(CMOV);
8238 __ cmov(equal, eax,
8239 Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
8240 } else {
8241 Label exit;
8242 __ j(not_equal, &exit);
8243 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
8244 __ bind(&exit);
8245 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008246 __ ret(0);
8247}
8248
8249
8250void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
8251 // The key is in edx and the parameter count is in eax.
8252
8253 // The displacement is used for skipping the frame pointer on the
8254 // stack. It is the offset of the last parameter (if any) relative
8255 // to the frame pointer.
8256 static const int kDisplacement = 1 * kPointerSize;
8257
8258 // Check that the key is a smi.
8259 Label slow;
8260 __ test(edx, Immediate(kSmiTagMask));
8261 __ j(not_zero, &slow, not_taken);
8262
8263 // Check if the calling frame is an arguments adaptor frame.
8264 Label adaptor;
8265 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
8266 __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
8267 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
8268 __ j(equal, &adaptor);
8269
8270 // Check index against formal parameters count limit passed in
8271 // through register eax. Use unsigned comparison to get negative
8272 // check for free.
8273 __ cmp(edx, Operand(eax));
8274 __ j(above_equal, &slow, not_taken);
8275
8276 // Read the argument from the stack and return it.
8277 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); // shifting code depends on this
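  // Both eax (parameter count) and edx (key) are smis, i.e. value << 1, so
  // scaling them with times_2 below multiplies by 4 = kPointerSize without
  // explicit untagging.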
8278 __ lea(ebx, Operand(ebp, eax, times_2, 0));
8279 __ neg(edx);
8280 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
8281 __ ret(0);
8282
8283 // Arguments adaptor case: Check index against actual arguments
8284 // limit found in the arguments adaptor frame. Use unsigned
8285 // comparison to get negative check for free.
8286 __ bind(&adaptor);
8287 __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
8288 __ cmp(edx, Operand(ecx));
8289 __ j(above_equal, &slow, not_taken);
8290
8291 // Read the argument from the stack and return it.
8292 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); // shifting code depends on this
8293 __ lea(ebx, Operand(ebx, ecx, times_2, 0));
8294 __ neg(edx);
8295 __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
8296 __ ret(0);
8297
8298 // Slow-case: Handle non-smi or out-of-bounds access to arguments
8299 // by calling the runtime system.
8300 __ bind(&slow);
8301 __ pop(ebx); // Return address.
8302 __ push(edx);
8303 __ push(ebx);
8304 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1, 1);
8305}
8306
8307
8308void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
8309 // The displacement is used for skipping the return address and the
8310 // frame pointer on the stack. It is the offset of the last
8311 // parameter (if any) relative to the frame pointer.
8312 static const int kDisplacement = 2 * kPointerSize;
8313
8314 // Check if the calling frame is an arguments adaptor frame.
Leon Clarkee46be812010-01-19 14:06:41 +00008315 Label adaptor_frame, try_allocate, runtime;
Steve Blocka7e24c12009-10-30 11:49:00 +00008316 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
8317 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
8318 __ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
Leon Clarkee46be812010-01-19 14:06:41 +00008319 __ j(equal, &adaptor_frame);
8320
8321 // Get the length from the frame.
8322 __ mov(ecx, Operand(esp, 1 * kPointerSize));
8323 __ jmp(&try_allocate);
Steve Blocka7e24c12009-10-30 11:49:00 +00008324
8325 // Patch the arguments.length and the parameters pointer.
Leon Clarkee46be812010-01-19 14:06:41 +00008326 __ bind(&adaptor_frame);
Steve Blocka7e24c12009-10-30 11:49:00 +00008327 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
8328 __ mov(Operand(esp, 1 * kPointerSize), ecx);
8329 __ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
8330 __ mov(Operand(esp, 2 * kPointerSize), edx);
8331
Leon Clarkee46be812010-01-19 14:06:41 +00008332 // Try the new space allocation. Start out with computing the size of
8333 // the arguments object and the elements array.
8334 Label add_arguments_object;
8335 __ bind(&try_allocate);
8336 __ test(ecx, Operand(ecx));
8337 __ j(zero, &add_arguments_object);
8338 __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
8339 __ bind(&add_arguments_object);
8340 __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));
8341
8342 // Do the allocation of both objects in one go.
8343 __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
8344
8345 // Get the arguments boilerplate from the current (global) context.
8346 int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
8347 __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
8348 __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
8349 __ mov(edi, Operand(edi, offset));
8350
8351 // Copy the JS object part.
8352 for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
8353 __ mov(ebx, FieldOperand(edi, i));
8354 __ mov(FieldOperand(eax, i), ebx);
8355 }
8356
8357 // Setup the callee in-object property.
8358 ASSERT(Heap::arguments_callee_index == 0);
8359 __ mov(ebx, Operand(esp, 3 * kPointerSize));
8360 __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);
8361
8362 // Get the length (smi tagged) and set that as an in-object property too.
8363 ASSERT(Heap::arguments_length_index == 1);
8364 __ mov(ecx, Operand(esp, 1 * kPointerSize));
8365 __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);
8366
8367 // If there are no actual arguments, we're done.
8368 Label done;
8369 __ test(ecx, Operand(ecx));
8370 __ j(zero, &done);
8371
8372 // Get the parameters pointer from the stack and untag the length.
8373 __ mov(edx, Operand(esp, 2 * kPointerSize));
8374 __ SmiUntag(ecx);
8375
8376 // Setup the elements pointer in the allocated arguments object and
8377 // initialize the header in the elements fixed array.
8378 __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
8379 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
8380 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
8381 Immediate(Factory::fixed_array_map()));
8382 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
8383
8384 // Copy the fixed array slots.
8385 Label loop;
8386 __ bind(&loop);
8387 __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
8388 __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
8389 __ add(Operand(edi), Immediate(kPointerSize));
8390 __ sub(Operand(edx), Immediate(kPointerSize));
8391 __ dec(ecx);
8392 __ test(ecx, Operand(ecx));
8393 __ j(not_zero, &loop);
8394
8395 // Return and remove the on-stack parameters.
8396 __ bind(&done);
8397 __ ret(3 * kPointerSize);
8398
Steve Blocka7e24c12009-10-30 11:49:00 +00008399 // Do the runtime call to allocate the arguments object.
8400 __ bind(&runtime);
8401 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3, 1);
8402}
8403
8404
Leon Clarkee46be812010-01-19 14:06:41 +00008405void RegExpExecStub::Generate(MacroAssembler* masm) {
Leon Clarke4515c472010-02-03 11:58:03 +00008406 // Just jump directly to the runtime system if native RegExp is not selected
 8407 // at compile time or if the regexp entry in generated code has been turned
 8408 // off by the runtime switch.
8409#ifndef V8_NATIVE_REGEXP
8410 __ TailCallRuntime(ExternalReference(Runtime::kRegExpExec), 4, 1);
8411#else // V8_NATIVE_REGEXP
Leon Clarkee46be812010-01-19 14:06:41 +00008412 if (!FLAG_regexp_entry_native) {
8413 __ TailCallRuntime(ExternalReference(Runtime::kRegExpExec), 4, 1);
8414 return;
8415 }
8416
8417 // Stack frame on entry.
8418 // esp[0]: return address
8419 // esp[4]: last_match_info (expected JSArray)
8420 // esp[8]: previous index
8421 // esp[12]: subject string
8422 // esp[16]: JSRegExp object
8423
Leon Clarked91b9f72010-01-27 17:25:45 +00008424 static const int kLastMatchInfoOffset = 1 * kPointerSize;
8425 static const int kPreviousIndexOffset = 2 * kPointerSize;
8426 static const int kSubjectOffset = 3 * kPointerSize;
8427 static const int kJSRegExpOffset = 4 * kPointerSize;
8428
8429 Label runtime, invoke_regexp;
8430
8431 // Ensure that a RegExp stack is allocated.
8432 ExternalReference address_of_regexp_stack_memory_address =
8433 ExternalReference::address_of_regexp_stack_memory_address();
8434 ExternalReference address_of_regexp_stack_memory_size =
8435 ExternalReference::address_of_regexp_stack_memory_size();
8436 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
8437 __ test(ebx, Operand(ebx));
8438 __ j(zero, &runtime, not_taken);
Leon Clarkee46be812010-01-19 14:06:41 +00008439
8440 // Check that the first argument is a JSRegExp object.
Leon Clarked91b9f72010-01-27 17:25:45 +00008441 __ mov(eax, Operand(esp, kJSRegExpOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008442 ASSERT_EQ(0, kSmiTag);
8443 __ test(eax, Immediate(kSmiTagMask));
8444 __ j(zero, &runtime);
8445 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
8446 __ j(not_equal, &runtime);
8447 // Check that the RegExp has been compiled (data contains a fixed array).
8448 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00008449 if (FLAG_debug_code) {
8450 __ test(ecx, Immediate(kSmiTagMask));
8451 __ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
8452 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
8453 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
8454 }
Leon Clarkee46be812010-01-19 14:06:41 +00008455
8456 // ecx: RegExp data (FixedArray)
8457 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
8458 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
8459 __ cmp(Operand(ebx), Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
8460 __ j(not_equal, &runtime);
8461
8462 // ecx: RegExp data (FixedArray)
8463 // Check that the number of captures fit in the static offsets vector buffer.
8464 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
8465 // Calculate number of capture registers (number_of_captures + 1) * 2. This
 8466 // uses the assumption that smis are 2 * their untagged value.
8467 ASSERT_EQ(0, kSmiTag);
8468 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
8469 __ add(Operand(edx), Immediate(2)); // edx was a smi.
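  // The smi in edx already equals number_of_captures * 2, so adding 2 gives
  // (number_of_captures + 1) * 2 directly.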
8470 // Check that the static offsets vector buffer is large enough.
8471 __ cmp(edx, OffsetsVector::kStaticOffsetsVectorSize);
8472 __ j(above, &runtime);
8473
8474 // ecx: RegExp data (FixedArray)
8475 // edx: Number of capture registers
8476 // Check that the second argument is a string.
Leon Clarked91b9f72010-01-27 17:25:45 +00008477 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008478 __ test(eax, Immediate(kSmiTagMask));
8479 __ j(zero, &runtime);
8480 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
8481 __ j(NegateCondition(is_string), &runtime);
8482 // Get the length of the string to ebx.
8483 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
8484
8485 // ebx: Length of subject string
8486 // ecx: RegExp data (FixedArray)
8487 // edx: Number of capture registers
8488 // Check that the third argument is a positive smi.
Leon Clarke4515c472010-02-03 11:58:03 +00008489 // Check that the third argument is a positive smi less than the subject
8490 // string length. A negative value will be greater (usigned comparison).
Leon Clarked91b9f72010-01-27 17:25:45 +00008491 __ mov(eax, Operand(esp, kPreviousIndexOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008492 __ SmiUntag(eax);
8493 __ cmp(eax, Operand(ebx));
Leon Clarke4515c472010-02-03 11:58:03 +00008494 __ j(above, &runtime);
Leon Clarkee46be812010-01-19 14:06:41 +00008495
8496 // ecx: RegExp data (FixedArray)
8497 // edx: Number of capture registers
8498 // Check that the fourth object is a JSArray object.
Leon Clarked91b9f72010-01-27 17:25:45 +00008499 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008500 __ test(eax, Immediate(kSmiTagMask));
8501 __ j(zero, &runtime);
8502 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
8503 __ j(not_equal, &runtime);
8504 // Check that the JSArray is in fast case.
8505 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
8506 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
8507 __ cmp(eax, Factory::fixed_array_map());
8508 __ j(not_equal, &runtime);
8509 // Check that the last match info has space for the capture registers and the
8510 // additional information.
8511 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
8512 __ add(Operand(edx), Immediate(RegExpImpl::kLastMatchOverhead));
8513 __ cmp(edx, Operand(eax));
8514 __ j(greater, &runtime);
8515
8516 // ecx: RegExp data (FixedArray)
Leon Clarked91b9f72010-01-27 17:25:45 +00008517 // Check the representation and encoding of the subject string.
8518 Label seq_string, seq_two_byte_string, check_code;
8519 const int kStringRepresentationEncodingMask =
8520 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
8521 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008522 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
8523 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00008524 __ and_(ebx, kStringRepresentationEncodingMask);
8525 // First check for sequential string.
8526 ASSERT_EQ(0, kStringTag);
8527 ASSERT_EQ(0, kSeqStringTag);
8528 __ test(Operand(ebx),
8529 Immediate(kIsNotStringMask | kStringRepresentationMask));
8530 __ j(zero, &seq_string);
8531
8532 // Check for flat cons string.
8533 // A flat cons string is a cons string where the second part is the empty
8534 // string. In that case the subject string is just the first part of the cons
8535 // string. Also in this case the first part of the cons string is known to be
Leon Clarke4515c472010-02-03 11:58:03 +00008536 // a sequential string or an external string.
Leon Clarked91b9f72010-01-27 17:25:45 +00008537 __ mov(edx, ebx);
8538 __ and_(edx, kStringRepresentationMask);
8539 __ cmp(edx, kConsStringTag);
Leon Clarkee46be812010-01-19 14:06:41 +00008540 __ j(not_equal, &runtime);
Leon Clarked91b9f72010-01-27 17:25:45 +00008541 __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00008542 __ cmp(Operand(edx), Factory::empty_string());
Leon Clarked91b9f72010-01-27 17:25:45 +00008543 __ j(not_equal, &runtime);
8544 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
8545 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
8546 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
Leon Clarke4515c472010-02-03 11:58:03 +00008547 ASSERT_EQ(0, kSeqStringTag);
8548 __ test(ebx, Immediate(kStringRepresentationMask));
8549 __ j(not_zero, &runtime);
Leon Clarked91b9f72010-01-27 17:25:45 +00008550 __ and_(ebx, kStringRepresentationEncodingMask);
Leon Clarkee46be812010-01-19 14:06:41 +00008551
Leon Clarked91b9f72010-01-27 17:25:45 +00008552 __ bind(&seq_string);
 8553 // eax: subject string (sequential, either ascii or two byte)
 8554 // ebx: subject string type & kStringRepresentationEncodingMask
Leon Clarkee46be812010-01-19 14:06:41 +00008555 // ecx: RegExp data (FixedArray)
8556 // Check that the irregexp code has been generated for an ascii string. If
Leon Clarked91b9f72010-01-27 17:25:45 +00008557 // it has, the field contains a code object; otherwise it contains the hole.
8558 __ cmp(ebx, kStringTag | kSeqStringTag | kTwoByteStringTag);
8559 __ j(equal, &seq_two_byte_string);
Leon Clarke4515c472010-02-03 11:58:03 +00008560 if (FLAG_debug_code) {
8561 __ cmp(ebx, kStringTag | kSeqStringTag | kAsciiStringTag);
8562 __ Check(equal, "Expected sequential ascii string");
8563 }
Leon Clarkee46be812010-01-19 14:06:41 +00008564 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00008565 __ Set(edi, Immediate(1)); // Type is ascii.
8566 __ jmp(&check_code);
8567
8568 __ bind(&seq_two_byte_string);
8569 // eax: subject string
8570 // ecx: RegExp data (FixedArray)
8571 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
8572 __ Set(edi, Immediate(0)); // Type is two byte.
8573
8574 __ bind(&check_code);
Leon Clarke4515c472010-02-03 11:58:03 +00008575 // Check that the irregexp code has been generated for the actual string
 8576 // encoding. If it has, the field contains a code object; otherwise it contains
8577 // the hole.
Leon Clarkee46be812010-01-19 14:06:41 +00008578 __ CmpObjectType(edx, CODE_TYPE, ebx);
8579 __ j(not_equal, &runtime);
8580
Leon Clarked91b9f72010-01-27 17:25:45 +00008581 // eax: subject string
8582 // edx: code
Leon Clarke4515c472010-02-03 11:58:03 +00008583 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
Leon Clarkee46be812010-01-19 14:06:41 +00008584 // Load used arguments before starting to push arguments for call to native
8585 // RegExp code, to avoid having to handle a changing stack height.
Leon Clarked91b9f72010-01-27 17:25:45 +00008586 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
Leon Clarked91b9f72010-01-27 17:25:45 +00008587 __ SmiUntag(ebx); // Previous index from smi.
Leon Clarkee46be812010-01-19 14:06:41 +00008588
8589 // eax: subject string
8590 // ebx: previous index
8591 // edx: code
Leon Clarke4515c472010-02-03 11:58:03 +00008592 // edi: encoding of subject string (1 if ascii, 0 if two_byte);
Leon Clarkee46be812010-01-19 14:06:41 +00008593 // All checks done. Now push arguments for native regexp code.
8594 __ IncrementCounter(&Counters::regexp_entry_native, 1);
8595
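  // The seven arguments pushed below (7 down to 1) are what the native RegExp
  // code expects: direct-call flag, end of the backtrack stack, static offsets
  // vector, end and start of the string data, previous index, subject string.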
Leon Clarked91b9f72010-01-27 17:25:45 +00008596 // Argument 7: Indicate that this is a direct call from JavaScript.
Leon Clarkee46be812010-01-19 14:06:41 +00008597 __ push(Immediate(1));
8598
Leon Clarked91b9f72010-01-27 17:25:45 +00008599 // Argument 6: Start (high end) of backtracking stack memory area.
Leon Clarkee46be812010-01-19 14:06:41 +00008600 __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
8601 __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
8602 __ push(ecx);
8603
Leon Clarkee46be812010-01-19 14:06:41 +00008604 // Argument 5: static offsets vector buffer.
8605 __ push(Immediate(ExternalReference::address_of_static_offsets_vector()));
8606
Leon Clarked91b9f72010-01-27 17:25:45 +00008607 // Argument 4: End of string data
8608 // Argument 3: Start of string data
8609 Label push_two_byte, push_rest;
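  // edi still holds the encoding flag set above (1 for ascii, 0 for two byte);
  // the test below captures it before edi is reused for the string length and
  // selects the character size used in the address computations that follow.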
8610 __ test(edi, Operand(edi));
8611 __ mov(edi, FieldOperand(eax, String::kLengthOffset));
8612 __ j(zero, &push_two_byte);
8613 __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
8614 __ push(ecx); // Argument 4.
8615 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
8616 __ push(ecx); // Argument 3.
8617 __ jmp(&push_rest);
Leon Clarkee46be812010-01-19 14:06:41 +00008618
Leon Clarked91b9f72010-01-27 17:25:45 +00008619 __ bind(&push_two_byte);
Leon Clarked91b9f72010-01-27 17:25:45 +00008620 __ lea(ecx, FieldOperand(eax, edi, times_2, SeqTwoByteString::kHeaderSize));
8621 __ push(ecx); // Argument 4.
8622 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
8623 __ push(ecx); // Argument 3.
8624
8625 __ bind(&push_rest);
Leon Clarkee46be812010-01-19 14:06:41 +00008626
8627 // Argument 2: Previous index.
Leon Clarked91b9f72010-01-27 17:25:45 +00008628 __ push(ebx);
Leon Clarkee46be812010-01-19 14:06:41 +00008629
8630 // Argument 1: Subject string.
8631 __ push(eax);
8632
8633 // Locate the code entry and call it.
8634 __ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
8635 __ call(Operand(edx));
8636 // Remove arguments.
Leon Clarked91b9f72010-01-27 17:25:45 +00008637 __ add(Operand(esp), Immediate(7 * kPointerSize));
Leon Clarkee46be812010-01-19 14:06:41 +00008638
8639 // Check the result.
8640 Label success;
8641 __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
8642 __ j(equal, &success, taken);
8643 Label failure;
8644 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
8645 __ j(equal, &failure, taken);
8646 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
8647 // If not exception it can only be retry. Handle that in the runtime system.
8648 __ j(not_equal, &runtime);
8649 // Result must now be exception. If there is no pending exception already, a
8650 // stack overflow (on the backtrack stack) was detected in RegExp code, but
8651 // the exception has not been created yet. Handle that in the runtime system.
Leon Clarke4515c472010-02-03 11:58:03 +00008652 // TODO(592): Rerun the RegExp to get the stack overflow exception.
Leon Clarkee46be812010-01-19 14:06:41 +00008653 ExternalReference pending_exception(Top::k_pending_exception_address);
8654 __ mov(eax,
8655 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
8656 __ cmp(eax, Operand::StaticVariable(pending_exception));
8657 __ j(equal, &runtime);
8658 __ bind(&failure);
8659 // For failure and exception return null.
8660 __ mov(Operand(eax), Factory::null_value());
8661 __ ret(4 * kPointerSize);
8662
8663 // Load RegExp data.
8664 __ bind(&success);
Leon Clarked91b9f72010-01-27 17:25:45 +00008665 __ mov(eax, Operand(esp, kJSRegExpOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008666 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
8667 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
8668 // Calculate number of capture registers (number_of_captures + 1) * 2.
Leon Clarke4515c472010-02-03 11:58:03 +00008669 ASSERT_EQ(0, kSmiTag);
8670 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
Leon Clarkee46be812010-01-19 14:06:41 +00008671 __ add(Operand(edx), Immediate(2)); // edx was a smi.
8672
8673 // edx: Number of capture registers
8674 // Load last_match_info which is still known to be a fast case JSArray.
Leon Clarked91b9f72010-01-27 17:25:45 +00008675 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008676 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
8677
8678 // ebx: last_match_info backing store (FixedArray)
8679 // edx: number of capture registers
8680 // Store the capture count.
8681 __ SmiTag(edx); // Number of capture registers to smi.
8682 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
8683 __ SmiUntag(edx); // Number of capture registers back from smi.
8684 // Store last subject and last input.
Leon Clarked91b9f72010-01-27 17:25:45 +00008685 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008686 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
8687 __ mov(ecx, ebx);
8688 __ RecordWrite(ecx, RegExpImpl::kLastSubjectOffset, eax, edi);
Leon Clarked91b9f72010-01-27 17:25:45 +00008689 __ mov(eax, Operand(esp, kSubjectOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008690 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
8691 __ mov(ecx, ebx);
8692 __ RecordWrite(ecx, RegExpImpl::kLastInputOffset, eax, edi);
8693
8694 // Get the static offsets vector filled by the native regexp code.
8695 ExternalReference address_of_static_offsets_vector =
8696 ExternalReference::address_of_static_offsets_vector();
8697 __ mov(ecx, Immediate(address_of_static_offsets_vector));
8698
8699 // ebx: last_match_info backing store (FixedArray)
8700 // ecx: offsets vector
8701 // edx: number of capture registers
8702 Label next_capture, done;
Leon Clarked91b9f72010-01-27 17:25:45 +00008703 __ mov(eax, Operand(esp, kPreviousIndexOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008704 // Capture register counter starts from number of capture registers and
8705 // counts down until wrapping after zero.
8706 __ bind(&next_capture);
8707 __ sub(Operand(edx), Immediate(1));
8708 __ j(negative, &done);
8709 // Read the value from the static offsets vector buffer.
Leon Clarke4515c472010-02-03 11:58:03 +00008710 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
Leon Clarkee46be812010-01-19 14:06:41 +00008711 // Perform explicit shift
8712 ASSERT_EQ(0, kSmiTag);
8713 __ shl(edi, kSmiTagSize);
8714 // Add previous index (from its stack slot) if value is not negative.
8715 Label capture_negative;
8716 // The sign flag was set by the shift above.
8717 __ j(negative, &capture_negative, not_taken);
8718 __ add(edi, Operand(eax)); // Add previous index (adding smi to smi).
8719 __ bind(&capture_negative);
8720 // Store the smi value in the last match info.
8721 __ mov(FieldOperand(ebx,
8722 edx,
8723 times_pointer_size,
8724 RegExpImpl::kFirstCaptureOffset),
8725 edi);
8726 __ jmp(&next_capture);
8727 __ bind(&done);
8728
8729 // Return last match info.
Leon Clarked91b9f72010-01-27 17:25:45 +00008730 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00008731 __ ret(4 * kPointerSize);
8732
8733 // Do the runtime call to execute the regexp.
8734 __ bind(&runtime);
8735 __ TailCallRuntime(ExternalReference(Runtime::kRegExpExec), 4, 1);
Leon Clarke4515c472010-02-03 11:58:03 +00008736#endif // V8_NATIVE_REGEXP
Leon Clarkee46be812010-01-19 14:06:41 +00008737}
8738
8739
Steve Blocka7e24c12009-10-30 11:49:00 +00008740void CompareStub::Generate(MacroAssembler* masm) {
8741 Label call_builtin, done;
8742
8743 // NOTICE! This code is only reached after a smi-fast-case check, so
8744 // it is certain that at least one operand isn't a smi.
8745
8746 if (cc_ == equal) { // Both strict and non-strict.
8747 Label slow; // Fallthrough label.
8748 // Equality is almost reflexive (everything but NaN), so start by testing
8749 // for "identity and not NaN".
8750 {
8751 Label not_identical;
8752 __ cmp(eax, Operand(edx));
8753 __ j(not_equal, &not_identical);
8754 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
8755 // so we do the second best thing - test it ourselves.
8756
Leon Clarkee46be812010-01-19 14:06:41 +00008757 if (never_nan_nan_) {
8758 __ Set(eax, Immediate(0));
8759 __ ret(0);
8760 } else {
8761 Label return_equal;
8762 Label heap_number;
8763 // If it's not a heap number, then return equal.
8764 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
8765 Immediate(Factory::heap_number_map()));
8766 __ j(equal, &heap_number);
8767 __ bind(&return_equal);
8768 __ Set(eax, Immediate(0));
8769 __ ret(0);
Steve Blocka7e24c12009-10-30 11:49:00 +00008770
Leon Clarkee46be812010-01-19 14:06:41 +00008771 __ bind(&heap_number);
8772 // It is a heap number, so return non-equal if it's NaN and equal if
8773 // it's not NaN.
8774 // The representation of NaN values has all exponent bits (52..62) set,
8775 // and not all mantissa bits (0..51) clear.
8776 // We only accept QNaNs, which have bit 51 set.
8777 // Read top bits of double representation (second word of value).
Steve Blocka7e24c12009-10-30 11:49:00 +00008778
Leon Clarkee46be812010-01-19 14:06:41 +00008779 // Value is a QNaN if value & kQuietNaNMask == kQuietNaNMask, i.e.,
8780 // all bits in the mask are set. We only need to check the word
8781 // that contains the exponent and high bit of the mantissa.
8782 ASSERT_NE(0, (kQuietNaNHighBitsMask << 1) & 0x80000000u);
8783 __ mov(edx, FieldOperand(edx, HeapNumber::kExponentOffset));
8784 __ xor_(eax, Operand(eax));
8785 // Shift value and mask so kQuietNaNHighBitsMask applies to topmost
8786 // bits.
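  // With the mask shifted into the topmost bits, the unsigned comparison below
  // is above-or-equal exactly when all of the exponent bits and the top
  // mantissa bit are set, i.e. when the value is a QNaN.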
8787 __ add(edx, Operand(edx));
8788 __ cmp(edx, kQuietNaNHighBitsMask << 1);
8789 __ setcc(above_equal, eax);
8790 __ ret(0);
8791 }
Steve Blocka7e24c12009-10-30 11:49:00 +00008792
8793 __ bind(&not_identical);
8794 }
8795
8796 // If we're doing a strict equality comparison, we don't have to do
8797 // type conversion, so we generate code to do fast comparison for objects
8798 // and oddballs. Non-smi numbers and strings still go through the usual
8799 // slow-case code.
8800 if (strict_) {
8801 // If either is a Smi (we know that not both are), then they can only
8802 // be equal if the other is a HeapNumber. If so, use the slow case.
8803 {
8804 Label not_smis;
8805 ASSERT_EQ(0, kSmiTag);
8806 ASSERT_EQ(0, Smi::FromInt(0));
8807 __ mov(ecx, Immediate(kSmiTagMask));
8808 __ and_(ecx, Operand(eax));
8809 __ test(ecx, Operand(edx));
8810 __ j(not_zero, &not_smis);
8811 // One operand is a smi.
8812
8813 // Check whether the non-smi is a heap number.
8814 ASSERT_EQ(1, kSmiTagMask);
8815 // ecx still holds eax & kSmiTag, which is either zero or one.
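  // The next five instructions compute ebx = (eax is a smi) ? edx : eax without
  // branching: subtracting one turns ecx into an all-ones mask when eax is a
  // smi and into zero otherwise, and ebx = eax ^ ((eax ^ edx) & mask).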
8816 __ sub(Operand(ecx), Immediate(0x01));
8817 __ mov(ebx, edx);
8818 __ xor_(ebx, Operand(eax));
8819 __ and_(ebx, Operand(ecx)); // ebx holds either 0 or eax ^ edx.
8820 __ xor_(ebx, Operand(eax));
8821 // if eax was smi, ebx is now edx, else eax.
8822
8823 // Check if the non-smi operand is a heap number.
8824 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
8825 Immediate(Factory::heap_number_map()));
8826 // If heap number, handle it in the slow case.
8827 __ j(equal, &slow);
8828 // Return non-equal (ebx is not zero)
8829 __ mov(eax, ebx);
8830 __ ret(0);
8831
8832 __ bind(&not_smis);
8833 }
8834
8835 // If either operand is a JSObject or an oddball value, then they are not
8836 // equal since their pointers are different.
8837 // There is no test for undetectability in strict equality.
8838
8839 // Get the type of the first operand.
8840 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
8841 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
8842
8843 // If the first object is a JS object, we have done pointer comparison.
8844 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
8845 Label first_non_object;
8846 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
8847 __ j(less, &first_non_object);
8848
8849 // Return non-zero (eax is not zero)
8850 Label return_not_equal;
8851 ASSERT(kHeapObjectTag != 0);
8852 __ bind(&return_not_equal);
8853 __ ret(0);
8854
8855 __ bind(&first_non_object);
8856 // Check for oddballs: true, false, null, undefined.
8857 __ cmp(ecx, ODDBALL_TYPE);
8858 __ j(equal, &return_not_equal);
8859
8860 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
8861 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
8862
8863 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
8864 __ j(greater_equal, &return_not_equal);
8865
8866 // Check for oddballs: true, false, null, undefined.
8867 __ cmp(ecx, ODDBALL_TYPE);
8868 __ j(equal, &return_not_equal);
8869
8870 // Fall through to the general case.
8871 }
8872 __ bind(&slow);
8873 }
8874
8875 // Push arguments below the return address.
8876 __ pop(ecx);
8877 __ push(eax);
8878 __ push(edx);
8879 __ push(ecx);
8880
8881 // Inlined floating point compare.
8882 // Call builtin if operands are not floating point or smi.
8883 Label check_for_symbols;
8884 Label unordered;
Steve Blockd0582a62009-12-15 09:54:21 +00008885 if (CpuFeatures::IsSupported(SSE2)) {
8886 CpuFeatures::Scope use_sse2(SSE2);
8887 CpuFeatures::Scope use_cmov(CMOV);
Steve Blocka7e24c12009-10-30 11:49:00 +00008888
Leon Clarked91b9f72010-01-27 17:25:45 +00008889 FloatingPointHelper::LoadSSE2Operands(masm, &check_for_symbols);
Steve Blocka7e24c12009-10-30 11:49:00 +00008890 __ comisd(xmm0, xmm1);
8891
8892 // Jump to builtin for NaN.
8893 __ j(parity_even, &unordered, not_taken);
8894 __ mov(eax, 0); // equal
8895 __ mov(ecx, Immediate(Smi::FromInt(1)));
8896 __ cmov(above, eax, Operand(ecx));
8897 __ mov(ecx, Immediate(Smi::FromInt(-1)));
8898 __ cmov(below, eax, Operand(ecx));
8899 __ ret(2 * kPointerSize);
8900 } else {
8901 FloatingPointHelper::CheckFloatOperands(masm, &check_for_symbols, ebx);
8902 FloatingPointHelper::LoadFloatOperands(masm, ecx);
8903 __ FCmp();
8904
8905 // Jump to builtin for NaN.
8906 __ j(parity_even, &unordered, not_taken);
8907
8908 Label below_lbl, above_lbl;
8909 // Return a result of -1, 0, or 1, to indicate result of comparison.
8910 __ j(below, &below_lbl, not_taken);
8911 __ j(above, &above_lbl, not_taken);
8912
8913 __ xor_(eax, Operand(eax)); // equal
8914 // Both arguments were pushed in case a runtime call was needed.
8915 __ ret(2 * kPointerSize);
8916
8917 __ bind(&below_lbl);
8918 __ mov(eax, Immediate(Smi::FromInt(-1)));
8919 __ ret(2 * kPointerSize);
8920
8921 __ bind(&above_lbl);
8922 __ mov(eax, Immediate(Smi::FromInt(1)));
8923 __ ret(2 * kPointerSize); // eax, edx were pushed
8924 }
8925 // If one of the numbers was NaN, then the result is always false.
8926 // The cc is never not-equal.
8927 __ bind(&unordered);
8928 ASSERT(cc_ != not_equal);
8929 if (cc_ == less || cc_ == less_equal) {
8930 __ mov(eax, Immediate(Smi::FromInt(1)));
8931 } else {
8932 __ mov(eax, Immediate(Smi::FromInt(-1)));
8933 }
8934 __ ret(2 * kPointerSize); // eax, edx were pushed
8935
8936 // Fast negative check for symbol-to-symbol equality.
8937 __ bind(&check_for_symbols);
Leon Clarkee46be812010-01-19 14:06:41 +00008938 Label check_for_strings;
Steve Blocka7e24c12009-10-30 11:49:00 +00008939 if (cc_ == equal) {
Leon Clarkee46be812010-01-19 14:06:41 +00008940 BranchIfNonSymbol(masm, &check_for_strings, eax, ecx);
8941 BranchIfNonSymbol(masm, &check_for_strings, edx, ecx);
Steve Blocka7e24c12009-10-30 11:49:00 +00008942
8943 // We've already checked for object identity, so if both operands
8944 // are symbols they aren't equal. Register eax already holds a
8945 // non-zero value, which indicates not equal, so just return.
8946 __ ret(2 * kPointerSize);
8947 }
8948
Leon Clarkee46be812010-01-19 14:06:41 +00008949 __ bind(&check_for_strings);
8950
Leon Clarked91b9f72010-01-27 17:25:45 +00008951 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &call_builtin);
Leon Clarkee46be812010-01-19 14:06:41 +00008952
8953 // Inline comparison of ascii strings.
8954 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
8955 edx,
8956 eax,
8957 ecx,
8958 ebx,
8959 edi);
8960#ifdef DEBUG
8961 __ Abort("Unexpected fall-through from string comparison");
8962#endif
8963
Steve Blocka7e24c12009-10-30 11:49:00 +00008964 __ bind(&call_builtin);
8965 // must swap argument order
8966 __ pop(ecx);
8967 __ pop(edx);
8968 __ pop(eax);
8969 __ push(edx);
8970 __ push(eax);
8971
8972 // Figure out which native to call and setup the arguments.
8973 Builtins::JavaScript builtin;
8974 if (cc_ == equal) {
8975 builtin = strict_ ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
8976 } else {
8977 builtin = Builtins::COMPARE;
8978 int ncr; // NaN compare result
8979 if (cc_ == less || cc_ == less_equal) {
8980 ncr = GREATER;
8981 } else {
8982 ASSERT(cc_ == greater || cc_ == greater_equal); // remaining cases
8983 ncr = LESS;
8984 }
8985 __ push(Immediate(Smi::FromInt(ncr)));
8986 }
8987
8988 // Restore return address on the stack.
8989 __ push(ecx);
8990
8991 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
8992 // tagged as a small integer.
8993 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
8994}
8995
8996
8997void CompareStub::BranchIfNonSymbol(MacroAssembler* masm,
8998 Label* label,
8999 Register object,
9000 Register scratch) {
9001 __ test(object, Immediate(kSmiTagMask));
9002 __ j(zero, label);
9003 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
9004 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
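  // A symbol is a string (the not-string bit is clear) that also has the symbol
  // bit set, so both bits are checked with a single masked compare below.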
9005 __ and_(scratch, kIsSymbolMask | kIsNotStringMask);
9006 __ cmp(scratch, kSymbolTag | kStringTag);
9007 __ j(not_equal, label);
9008}
9009
9010
9011void StackCheckStub::Generate(MacroAssembler* masm) {
9012 // Because builtins always remove the receiver from the stack, we
9013 // have to fake one to avoid underflowing the stack. The receiver
9014 // must be inserted below the return address on the stack so we
9015 // temporarily store that in a register.
9016 __ pop(eax);
9017 __ push(Immediate(Smi::FromInt(0)));
9018 __ push(eax);
9019
9020 // Do tail-call to runtime routine.
9021 __ TailCallRuntime(ExternalReference(Runtime::kStackGuard), 1, 1);
9022}
9023
9024
9025void CallFunctionStub::Generate(MacroAssembler* masm) {
9026 Label slow;
9027
Leon Clarkee46be812010-01-19 14:06:41 +00009028 // If the receiver might be a value (string, number or boolean) check for this
9029 // and box it if it is.
9030 if (ReceiverMightBeValue()) {
9031 // Get the receiver from the stack.
9032 // +1 ~ return address
9033 Label receiver_is_value, receiver_is_js_object;
9034 __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
9035
9036 // Check if receiver is a smi (which is a number value).
9037 __ test(eax, Immediate(kSmiTagMask));
9038 __ j(zero, &receiver_is_value, not_taken);
9039
9040 // Check if the receiver is a valid JS object.
9041 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
9042 __ j(above_equal, &receiver_is_js_object);
9043
9044 // Call the runtime to box the value.
9045 __ bind(&receiver_is_value);
9046 __ EnterInternalFrame();
9047 __ push(eax);
9048 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
9049 __ LeaveInternalFrame();
9050 __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
9051
9052 __ bind(&receiver_is_js_object);
9053 }
9054
Steve Blocka7e24c12009-10-30 11:49:00 +00009055 // Get the function to call from the stack.
9056 // +2 ~ receiver, return address
9057 __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
9058
9059 // Check that the function really is a JavaScript function.
9060 __ test(edi, Immediate(kSmiTagMask));
9061 __ j(zero, &slow, not_taken);
9062 // Go to the slow case if we do not have a function.
9063 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
9064 __ j(not_equal, &slow, not_taken);
9065
9066 // Fast-case: Just invoke the function.
9067 ParameterCount actual(argc_);
9068 __ InvokeFunction(edi, actual, JUMP_FUNCTION);
9069
9070 // Slow-case: Non-function called.
9071 __ bind(&slow);
9072 __ Set(eax, Immediate(argc_));
9073 __ Set(ebx, Immediate(0));
9074 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
9075 Handle<Code> adaptor(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
9076 __ jmp(adaptor, RelocInfo::CODE_TARGET);
9077}
9078
9079
Steve Blocka7e24c12009-10-30 11:49:00 +00009080void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
9081 // eax holds the exception.
9082
9083 // Adjust this code if not the case.
9084 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
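  // Handler layout on the stack, per the offsets asserted here and below:
  // next handler (0), frame pointer (1 * kPointerSize), handler state
  // (2 * kPointerSize, popped into edx below) and return pc (3 * kPointerSize).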
9085
9086 // Drop the sp to the top of the handler.
9087 ExternalReference handler_address(Top::k_handler_address);
9088 __ mov(esp, Operand::StaticVariable(handler_address));
9089
9090 // Restore next handler and frame pointer, discard handler state.
9091 ASSERT(StackHandlerConstants::kNextOffset == 0);
9092 __ pop(Operand::StaticVariable(handler_address));
9093 ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
9094 __ pop(ebp);
9095 __ pop(edx); // Remove state.
9096
9097 // Before returning we restore the context from the frame pointer if
9098 // not NULL. The frame pointer is NULL in the exception handler of
9099 // a JS entry frame.
9100 __ xor_(esi, Operand(esi)); // Tentatively set context pointer to NULL.
9101 Label skip;
9102 __ cmp(ebp, 0);
9103 __ j(equal, &skip, not_taken);
9104 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
9105 __ bind(&skip);
9106
9107 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
9108 __ ret(0);
9109}
9110
9111
Steve Blockd0582a62009-12-15 09:54:21 +00009112// If true, a Handle<T> passed by value is passed and returned by
9113// using the location_ field directly. If false, it is passed and
9114// returned as a pointer to a handle.
9115#ifdef USING_MAC_ABI
9116static const bool kPassHandlesDirectly = true;
9117#else
9118static const bool kPassHandlesDirectly = false;
9119#endif
9120
9121
9122void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
9123 Label get_result;
9124 Label prologue;
9125 Label promote_scheduled_exception;
9126 __ EnterApiExitFrame(ExitFrame::MODE_NORMAL, kStackSpace, kArgc);
9127 ASSERT_EQ(kArgc, 4);
9128 if (kPassHandlesDirectly) {
9129 // When handles are passed directly we don't have to allocate extra
9130 // space for, and pass, an out parameter.
9131 __ mov(Operand(esp, 0 * kPointerSize), ebx); // name.
9132 __ mov(Operand(esp, 1 * kPointerSize), eax); // arguments pointer.
9133 } else {
9134 // The function expects three arguments to be passed but we allocate
9135 // four to get space for the output cell. The argument slots are filled
9136 // as follows:
9137 //
9138 // 3: output cell
9139 // 2: arguments pointer
9140 // 1: name
9141 // 0: pointer to the output cell
9142 //
9143 // Note that this is one more "argument" than the function expects
9144 // so the out cell will have to be popped explicitly after returning
9145 // from the function.
9146 __ mov(Operand(esp, 1 * kPointerSize), ebx); // name.
9147 __ mov(Operand(esp, 2 * kPointerSize), eax); // arguments pointer.
9148 __ mov(ebx, esp);
9149 __ add(Operand(ebx), Immediate(3 * kPointerSize));
9150 __ mov(Operand(esp, 0 * kPointerSize), ebx); // output
9151 __ mov(Operand(esp, 3 * kPointerSize), Immediate(0)); // out cell.
9152 }
9153 // Call the api function!
9154 __ call(fun()->address(), RelocInfo::RUNTIME_ENTRY);
9155 // Check if the function scheduled an exception.
9156 ExternalReference scheduled_exception_address =
9157 ExternalReference::scheduled_exception_address();
9158 __ cmp(Operand::StaticVariable(scheduled_exception_address),
9159 Immediate(Factory::the_hole_value()));
9160 __ j(not_equal, &promote_scheduled_exception, not_taken);
9161 if (!kPassHandlesDirectly) {
9162 // The returned value is a pointer to the handle holding the result.
9163 // Dereference this to get to the location.
9164 __ mov(eax, Operand(eax, 0));
9165 }
9166 // Check if the result handle holds 0
9167 __ test(eax, Operand(eax));
9168 __ j(not_zero, &get_result, taken);
9169 // It was zero; the result is undefined.
9170 __ mov(eax, Factory::undefined_value());
9171 __ jmp(&prologue);
9172 // It was non-zero. Dereference to get the result value.
9173 __ bind(&get_result);
9174 __ mov(eax, Operand(eax, 0));
9175 __ bind(&prologue);
9176 __ LeaveExitFrame(ExitFrame::MODE_NORMAL);
9177 __ ret(0);
9178 __ bind(&promote_scheduled_exception);
9179 __ TailCallRuntime(ExternalReference(Runtime::kPromoteScheduledException),
9180 0,
9181 1);
9182}
9183
9184
Steve Blocka7e24c12009-10-30 11:49:00 +00009185void CEntryStub::GenerateCore(MacroAssembler* masm,
9186 Label* throw_normal_exception,
9187 Label* throw_termination_exception,
9188 Label* throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009189 bool do_gc,
9190 bool always_allocate_scope) {
9191 // eax: result parameter for PerformGC, if any
9192 // ebx: pointer to C function (C callee-saved)
9193 // ebp: frame pointer (restored after C call)
9194 // esp: stack pointer (restored after C call)
9195 // edi: number of arguments including receiver (C callee-saved)
9196 // esi: pointer to the first argument (C callee-saved)
9197
Leon Clarke4515c472010-02-03 11:58:03 +00009198 // Result returned in eax, or eax+edx if result_size_ is 2.
9199
Steve Blocka7e24c12009-10-30 11:49:00 +00009200 if (do_gc) {
9201 __ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
9202 __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
9203 }
9204
9205 ExternalReference scope_depth =
9206 ExternalReference::heap_always_allocate_scope_depth();
9207 if (always_allocate_scope) {
9208 __ inc(Operand::StaticVariable(scope_depth));
9209 }
9210
9211 // Call C function.
9212 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
9213 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
9214 __ call(Operand(ebx));
9215 // Result is in eax or edx:eax - do not destroy these registers!
9216
9217 if (always_allocate_scope) {
9218 __ dec(Operand::StaticVariable(scope_depth));
9219 }
9220
9221 // Make sure we're not trying to return 'the hole' from the runtime
9222 // call as this may lead to crashes in the IC code later.
9223 if (FLAG_debug_code) {
9224 Label okay;
9225 __ cmp(eax, Factory::the_hole_value());
9226 __ j(not_equal, &okay);
9227 __ int3();
9228 __ bind(&okay);
9229 }
9230
9231 // Check for failure result.
9232 Label failure_returned;
9233 ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
9234 __ lea(ecx, Operand(eax, 1));
9235 // Lower 2 bits of ecx are 0 iff eax has failure tag.
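  // This works because (kFailureTag + 1) has its tag bits clear (asserted
  // above), so adding one to a failure-tagged value clears exactly those bits.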
9236 __ test(ecx, Immediate(kFailureTagMask));
9237 __ j(zero, &failure_returned, not_taken);
9238
9239 // Exit the JavaScript to C++ exit frame.
Leon Clarke4515c472010-02-03 11:58:03 +00009240 __ LeaveExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +00009241 __ ret(0);
9242
9243 // Handling of failure.
9244 __ bind(&failure_returned);
9245
9246 Label retry;
9247 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
9248 ASSERT(Failure::RETRY_AFTER_GC == 0);
9249 __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
9250 __ j(zero, &retry, taken);
9251
9252 // Special handling of out of memory exceptions.
9253 __ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
9254 __ j(equal, throw_out_of_memory_exception);
9255
9256 // Retrieve the pending exception and clear the variable.
9257 ExternalReference pending_exception_address(Top::k_pending_exception_address);
9258 __ mov(eax, Operand::StaticVariable(pending_exception_address));
9259 __ mov(edx,
9260 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
9261 __ mov(Operand::StaticVariable(pending_exception_address), edx);
9262
9263 // Special handling of termination exceptions which are uncatchable
9264 // by JavaScript code.
9265 __ cmp(eax, Factory::termination_exception());
9266 __ j(equal, throw_termination_exception);
9267
9268 // Handle normal exception.
9269 __ jmp(throw_normal_exception);
9270
9271 // Retry.
9272 __ bind(&retry);
9273}
9274
9275
9276void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
9277 UncatchableExceptionType type) {
9278 // Adjust this code if not the case.
9279 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
9280
9281 // Drop sp to the top stack handler.
9282 ExternalReference handler_address(Top::k_handler_address);
9283 __ mov(esp, Operand::StaticVariable(handler_address));
9284
9285 // Unwind the handlers until the ENTRY handler is found.
9286 Label loop, done;
9287 __ bind(&loop);
9288 // Load the type of the current stack handler.
9289 const int kStateOffset = StackHandlerConstants::kStateOffset;
9290 __ cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
9291 __ j(equal, &done);
9292 // Fetch the next handler in the list.
9293 const int kNextOffset = StackHandlerConstants::kNextOffset;
9294 __ mov(esp, Operand(esp, kNextOffset));
9295 __ jmp(&loop);
9296 __ bind(&done);
9297
9298 // Set the top handler address to the next handler past the current ENTRY handler.
9299 ASSERT(StackHandlerConstants::kNextOffset == 0);
9300 __ pop(Operand::StaticVariable(handler_address));
9301
9302 if (type == OUT_OF_MEMORY) {
9303 // Set external caught exception to false.
9304 ExternalReference external_caught(Top::k_external_caught_exception_address);
9305 __ mov(eax, false);
9306 __ mov(Operand::StaticVariable(external_caught), eax);
9307
9308 // Set pending exception and eax to out of memory exception.
9309 ExternalReference pending_exception(Top::k_pending_exception_address);
9310 __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
9311 __ mov(Operand::StaticVariable(pending_exception), eax);
9312 }
9313
9314 // Clear the context pointer.
9315 __ xor_(esi, Operand(esi));
9316
9317 // Restore fp from handler and discard handler state.
9318 ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
9319 __ pop(ebp);
9320 __ pop(edx); // State.
9321
9322 ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
9323 __ ret(0);
9324}
9325
9326
Leon Clarke4515c472010-02-03 11:58:03 +00009327void CEntryStub::Generate(MacroAssembler* masm) {
Steve Blocka7e24c12009-10-30 11:49:00 +00009328 // eax: number of arguments including receiver
9329 // ebx: pointer to C function (C callee-saved)
9330 // ebp: frame pointer (restored after C call)
9331 // esp: stack pointer (restored after C call)
9332 // esi: current context (C callee-saved)
9333 // edi: JS function of the caller (C callee-saved)
9334
9335 // NOTE: Invocations of builtins may return failure objects instead
9336 // of a proper result. The builtin entry handles this by performing
9337 // a garbage collection and retrying the builtin (twice).
9338
Steve Blocka7e24c12009-10-30 11:49:00 +00009339 // Enter the exit frame that transitions from JavaScript to C++.
Leon Clarke4515c472010-02-03 11:58:03 +00009340 __ EnterExitFrame(mode_);
Steve Blocka7e24c12009-10-30 11:49:00 +00009341
9342 // eax: result parameter for PerformGC, if any (setup below)
9343 // ebx: pointer to builtin function (C callee-saved)
9344 // ebp: frame pointer (restored after C call)
9345 // esp: stack pointer (restored after C call)
9346 // edi: number of arguments including receiver (C callee-saved)
9347 // esi: argv pointer (C callee-saved)
9348
9349 Label throw_normal_exception;
9350 Label throw_termination_exception;
9351 Label throw_out_of_memory_exception;
9352
9353 // Call into the runtime system.
9354 GenerateCore(masm,
9355 &throw_normal_exception,
9356 &throw_termination_exception,
9357 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009358 false,
9359 false);
9360
9361 // Do space-specific GC and retry runtime call.
9362 GenerateCore(masm,
9363 &throw_normal_exception,
9364 &throw_termination_exception,
9365 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009366 true,
9367 false);
9368
9369 // Do full GC and retry runtime call one final time.
9370 Failure* failure = Failure::InternalError();
9371 __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
9372 GenerateCore(masm,
9373 &throw_normal_exception,
9374 &throw_termination_exception,
9375 &throw_out_of_memory_exception,
Steve Blocka7e24c12009-10-30 11:49:00 +00009376 true,
9377 true);
9378
9379 __ bind(&throw_out_of_memory_exception);
9380 GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
9381
9382 __ bind(&throw_termination_exception);
9383 GenerateThrowUncatchable(masm, TERMINATION);
9384
9385 __ bind(&throw_normal_exception);
9386 GenerateThrowTOS(masm);
9387}
9388
9389
9390void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
9391 Label invoke, exit;
9392#ifdef ENABLE_LOGGING_AND_PROFILING
9393 Label not_outermost_js, not_outermost_js_2;
9394#endif
9395
9396 // Setup frame.
9397 __ push(ebp);
9398 __ mov(ebp, Operand(esp));
9399
9400 // Push marker in two places.
9401 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
9402 __ push(Immediate(Smi::FromInt(marker))); // context slot
9403 __ push(Immediate(Smi::FromInt(marker))); // function slot
9404 // Save callee-saved registers (C calling conventions).
9405 __ push(edi);
9406 __ push(esi);
9407 __ push(ebx);
9408
9409 // Save copies of the top frame descriptor on the stack.
9410 ExternalReference c_entry_fp(Top::k_c_entry_fp_address);
9411 __ push(Operand::StaticVariable(c_entry_fp));
9412
9413#ifdef ENABLE_LOGGING_AND_PROFILING
9414 // If this is the outermost JS call, set js_entry_sp value.
9415 ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
9416 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
9417 __ j(not_equal, &not_outermost_js);
9418 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
9419 __ bind(&not_outermost_js);
9420#endif
9421
9422 // Call a faked try-block that does the invoke.
9423 __ call(&invoke);
9424
9425 // Caught exception: Store result (exception) in the pending
9426 // exception field in the JSEnv and return a failure sentinel.
9427 ExternalReference pending_exception(Top::k_pending_exception_address);
9428 __ mov(Operand::StaticVariable(pending_exception), eax);
9429 __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
9430 __ jmp(&exit);
9431
9432 // Invoke: Link this frame into the handler chain.
9433 __ bind(&invoke);
9434 __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
9435
9436 // Clear any pending exceptions.
9437 __ mov(edx,
9438 Operand::StaticVariable(ExternalReference::the_hole_value_location()));
9439 __ mov(Operand::StaticVariable(pending_exception), edx);
9440
9441 // Fake a receiver (NULL).
9442 __ push(Immediate(0)); // receiver
9443
9444 // Invoke the function by calling through JS entry trampoline
9445 // builtin and pop the faked function when we return. Notice that we
9446 // cannot store a reference to the trampoline code directly in this
9447 // stub, because the builtin stubs may not have been generated yet.
9448 if (is_construct) {
9449 ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
9450 __ mov(edx, Immediate(construct_entry));
9451 } else {
9452 ExternalReference entry(Builtins::JSEntryTrampoline);
9453 __ mov(edx, Immediate(entry));
9454 }
9455 __ mov(edx, Operand(edx, 0)); // deref address
9456 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
9457 __ call(Operand(edx));
9458
9459 // Unlink this frame from the handler chain.
9460 __ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
9461 // Pop next_sp.
9462 __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
9463
9464#ifdef ENABLE_LOGGING_AND_PROFILING
9465 // If current EBP value is the same as js_entry_sp value, it means that
9466 // the current function is the outermost.
9467 __ cmp(ebp, Operand::StaticVariable(js_entry_sp));
9468 __ j(not_equal, &not_outermost_js_2);
9469 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
9470 __ bind(&not_outermost_js_2);
9471#endif
9472
9473 // Restore the top frame descriptor from the stack.
9474 __ bind(&exit);
9475 __ pop(Operand::StaticVariable(ExternalReference(Top::k_c_entry_fp_address)));
9476
9477 // Restore callee-saved registers (C calling conventions).
9478 __ pop(ebx);
9479 __ pop(esi);
9480 __ pop(edi);
9481 __ add(Operand(esp), Immediate(2 * kPointerSize)); // remove markers
9482
9483 // Restore frame pointer and return.
9484 __ pop(ebp);
9485 __ ret(0);
9486}
9487
9488
9489void InstanceofStub::Generate(MacroAssembler* masm) {
9490 // Get the object - go slow case if it's a smi.
9491 Label slow;
9492 __ mov(eax, Operand(esp, 2 * kPointerSize)); // 2 ~ return address, function
9493 __ test(eax, Immediate(kSmiTagMask));
9494 __ j(zero, &slow, not_taken);
9495
9496 // Check that the left hand is a JS object.
9497 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); // eax - object map
9498 __ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset)); // ecx - type
9499 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
9500 __ j(less, &slow, not_taken);
9501 __ cmp(ecx, LAST_JS_OBJECT_TYPE);
9502 __ j(greater, &slow, not_taken);
9503
9504 // Get the prototype of the function.
9505 __ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address
9506 __ TryGetFunctionPrototype(edx, ebx, ecx, &slow);
9507
9508 // Check that the function prototype is a JS object.
9509 __ test(ebx, Immediate(kSmiTagMask));
9510 __ j(zero, &slow, not_taken);
9511 __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
9512 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
9513 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
9514 __ j(less, &slow, not_taken);
9515 __ cmp(ecx, LAST_JS_OBJECT_TYPE);
9516 __ j(greater, &slow, not_taken);
9517
9518 // Register mapping: eax is object map and ebx is function prototype.
9519 __ mov(ecx, FieldOperand(eax, Map::kPrototypeOffset));
9520
9521 // Loop through the prototype chain looking for the function prototype.
9522 Label loop, is_instance, is_not_instance;
9523 __ bind(&loop);
9524 __ cmp(ecx, Operand(ebx));
9525 __ j(equal, &is_instance);
9526 __ cmp(Operand(ecx), Immediate(Factory::null_value()));
9527 __ j(equal, &is_not_instance);
9528 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
9529 __ mov(ecx, FieldOperand(ecx, Map::kPrototypeOffset));
9530 __ jmp(&loop);
9531
9532 __ bind(&is_instance);
9533 __ Set(eax, Immediate(0));
9534 __ ret(2 * kPointerSize);
9535
9536 __ bind(&is_not_instance);
9537 __ Set(eax, Immediate(Smi::FromInt(1)));
9538 __ ret(2 * kPointerSize);
9539
9540 // Slow-case: Go through the JavaScript implementation.
9541 __ bind(&slow);
9542 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
9543}
9544
9545
Leon Clarkee46be812010-01-19 14:06:41 +00009546// Unfortunately you have to run without snapshots to see most of these
9547// names in the profile since most compare stubs end up in the snapshot.
9548const char* CompareStub::GetName() {
9549 switch (cc_) {
9550 case less: return "CompareStub_LT";
9551 case greater: return "CompareStub_GT";
9552 case less_equal: return "CompareStub_LE";
9553 case greater_equal: return "CompareStub_GE";
9554 case not_equal: {
9555 if (strict_) {
9556 if (never_nan_nan_) {
9557 return "CompareStub_NE_STRICT_NO_NAN";
9558 } else {
9559 return "CompareStub_NE_STRICT";
9560 }
9561 } else {
9562 if (never_nan_nan_) {
9563 return "CompareStub_NE_NO_NAN";
9564 } else {
9565 return "CompareStub_NE";
9566 }
9567 }
9568 }
9569 case equal: {
9570 if (strict_) {
9571 if (never_nan_nan_) {
9572 return "CompareStub_EQ_STRICT_NO_NAN";
9573 } else {
9574 return "CompareStub_EQ_STRICT";
9575 }
9576 } else {
9577 if (never_nan_nan_) {
9578 return "CompareStub_EQ_NO_NAN";
9579 } else {
9580 return "CompareStub_EQ";
9581 }
9582 }
9583 }
9584 default: return "CompareStub";
9585 }
9586}
9587
9588
Steve Blocka7e24c12009-10-30 11:49:00 +00009589int CompareStub::MinorKey() {
Leon Clarkee46be812010-01-19 14:06:41 +00009590 // Encode the three parameters in a unique 16 bit value.
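  // Layout: bit 0 is strict_, bit 1 is never_nan_nan_ (only meaningful for
  // equality stubs), and bits 2 and up hold the condition code.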
9591 ASSERT(static_cast<unsigned>(cc_) < (1 << 14));
9592 int nnn_value = (never_nan_nan_ ? 2 : 0);
9593 if (cc_ != equal) nnn_value = 0; // Avoid duplicate stubs.
9594 return (static_cast<unsigned>(cc_) << 2) | nnn_value | (strict_ ? 1 : 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00009595}
9596
Steve Blockd0582a62009-12-15 09:54:21 +00009597
9598void StringAddStub::Generate(MacroAssembler* masm) {
9599 Label string_add_runtime;
9600
9601 // Load the two arguments.
9602 __ mov(eax, Operand(esp, 2 * kPointerSize)); // First argument.
9603 __ mov(edx, Operand(esp, 1 * kPointerSize)); // Second argument.
9604
9605 // Make sure that both arguments are strings if not known in advance.
9606 if (string_check_) {
9607 __ test(eax, Immediate(kSmiTagMask));
9608 __ j(zero, &string_add_runtime);
9609 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
9610 __ j(above_equal, &string_add_runtime);
9611
9612 // First argument is a string, test second.
9613 __ test(edx, Immediate(kSmiTagMask));
9614 __ j(zero, &string_add_runtime);
9615 __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
9616 __ j(above_equal, &string_add_runtime);
9617 }
9618
9619 // Both arguments are strings.
9620 // eax: first string
9621 // edx: second string
9622 // Check if either of the strings are empty. In that case return the other.
9623 Label second_not_zero_length, both_not_zero_length;
9624 __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
9625 __ test(ecx, Operand(ecx));
9626 __ j(not_zero, &second_not_zero_length);
9627 // Second string is empty, result is first string which is already in eax.
9628 __ IncrementCounter(&Counters::string_add_native, 1);
9629 __ ret(2 * kPointerSize);
9630 __ bind(&second_not_zero_length);
9631 __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
9632 __ test(ebx, Operand(ebx));
9633 __ j(not_zero, &both_not_zero_length);
9634 // First string is empty, result is second string which is in edx.
9635 __ mov(eax, edx);
9636 __ IncrementCounter(&Counters::string_add_native, 1);
9637 __ ret(2 * kPointerSize);
9638
9639 // Both strings are non-empty.
9640 // eax: first string
9641 // ebx: length of first string
9642 // ecx: length of second string
9643 // edx: second string
9644 // Look at the length of the result of adding the two strings.
9645 Label string_add_flat_result;
9646 __ bind(&both_not_zero_length);
9647 __ add(ebx, Operand(ecx));
9648 // Use the runtime system when adding two one-character strings, as it
9649 // contains optimizations for this specific case using the symbol table.
9650 __ cmp(ebx, 2);
9651 __ j(equal, &string_add_runtime);
9652 // Check if resulting string will be flat.
9653 __ cmp(ebx, String::kMinNonFlatLength);
9654 __ j(below, &string_add_flat_result);
9655 // Handle exceptionally long strings in the runtime system.
9656 ASSERT((String::kMaxLength & 0x80000000) == 0);
9657 __ cmp(ebx, String::kMaxLength);
9658 __ j(above, &string_add_runtime);
9659
9660 // If the result is not supposed to be flat, allocate a cons string object.
9661 // If both strings are ascii, the result is an ascii cons string.
9662 Label non_ascii, allocated;
9663 __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
9664 __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
9665 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
9666 __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
9667 __ and_(ecx, Operand(edi));
Leon Clarkee46be812010-01-19 14:06:41 +00009668 ASSERT(kStringEncodingMask == kAsciiStringTag);
Steve Blockd0582a62009-12-15 09:54:21 +00009669 __ test(ecx, Immediate(kAsciiStringTag));
9670 __ j(zero, &non_ascii);
9671 // Allocate an ascii cons string.
9672 __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
9673 __ bind(&allocated);
9674 // Fill the fields of the cons string.
9675 __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
9676 __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
9677 Immediate(String::kEmptyHashField));
9678 __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
9679 __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
9680 __ mov(eax, ecx);
9681 __ IncrementCounter(&Counters::string_add_native, 1);
9682 __ ret(2 * kPointerSize);
9683 __ bind(&non_ascii);
9684 // Allocate a two byte cons string.
9685 __ AllocateConsString(ecx, edi, no_reg, &string_add_runtime);
9686 __ jmp(&allocated);
9687
9688 // Handle creating a flat result. First check that both strings are not
9689 // external strings.
9690 // eax: first string
9691 // ebx: length of resulting flat string
9692 // edx: second string
9693 __ bind(&string_add_flat_result);
9694 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
9695 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
9696 __ and_(ecx, kStringRepresentationMask);
9697 __ cmp(ecx, kExternalStringTag);
9698 __ j(equal, &string_add_runtime);
9699 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
9700 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
9701 __ and_(ecx, kStringRepresentationMask);
9702 __ cmp(ecx, kExternalStringTag);
9703 __ j(equal, &string_add_runtime);
9704 // Now check if both strings are ascii strings.
9705 // eax: first string
9706 // ebx: length of resulting flat string
9707 // edx: second string
9708 Label non_ascii_string_add_flat_result;
9709 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
9710 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
Leon Clarkee46be812010-01-19 14:06:41 +00009711 ASSERT(kStringEncodingMask == kAsciiStringTag);
Steve Blockd0582a62009-12-15 09:54:21 +00009712 __ test(ecx, Immediate(kAsciiStringTag));
9713 __ j(zero, &non_ascii_string_add_flat_result);
9714 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
9715 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
9716 __ test(ecx, Immediate(kAsciiStringTag));
9717 __ j(zero, &string_add_runtime);
9718 // Both strings are ascii strings. As they are short they are both flat.
9719 __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
9720 // eax: result string
9721 __ mov(ecx, eax);
9722 // Locate first character of result.
9723 __ add(Operand(ecx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9724 // Load first argument and locate first character.
9725 __ mov(edx, Operand(esp, 2 * kPointerSize));
9726 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
9727 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9728 // eax: result string
9729 // ecx: first character of result
9730 // edx: first char of first argument
9731 // edi: length of first argument
9732 GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
9733 // Load second argument and locate first character.
9734 __ mov(edx, Operand(esp, 1 * kPointerSize));
9735 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
9736 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9737 // eax: result string
9738 // ecx: next character of result
9739 // edx: first char of second argument
9740 // edi: length of second argument
9741 GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
9742 __ IncrementCounter(&Counters::string_add_native, 1);
9743 __ ret(2 * kPointerSize);
9744
9745 // Handle creating a flat two byte result.
9746 // eax: first string - known to be two byte
9747 // ebx: length of resulting flat string
9748 // edx: second string
9749 __ bind(&non_ascii_string_add_flat_result);
9750 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
9751 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
9752 __ and_(ecx, kAsciiStringTag);
9753 __ j(not_zero, &string_add_runtime);
9754 // Both strings are two byte strings. As they are short they are both
9755 // flat.
9756 __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
9757 // eax: result string
9758 __ mov(ecx, eax);
9759 // Locate first character of result.
9760 __ add(Operand(ecx),
9761 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
9762 // Load first argument and locate first character.
9763 __ mov(edx, Operand(esp, 2 * kPointerSize));
9764 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
9765 __ add(Operand(edx),
9766 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
9767 // eax: result string
9768 // ecx: first character of result
9769 // edx: first char of first argument
9770 // edi: length of first argument
9771 GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
9772 // Load second argument and locate first character.
9773 __ mov(edx, Operand(esp, 1 * kPointerSize));
9774 __ mov(edi, FieldOperand(edx, String::kLengthOffset));
9775 __ add(Operand(edx), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9776 // eax: result string
9777 // ecx: next character of result
9778 // edx: first char of second argument
9779 // edi: length of second argument
9780 GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
9781 __ IncrementCounter(&Counters::string_add_native, 1);
9782 __ ret(2 * kPointerSize);
9783
9784 // Just jump to runtime to add the two strings.
9785 __ bind(&string_add_runtime);
9786 __ TailCallRuntime(ExternalReference(Runtime::kStringAdd), 2, 1);
9787}
9788
9789
Leon Clarkee46be812010-01-19 14:06:41 +00009790void StringStubBase::GenerateCopyCharacters(MacroAssembler* masm,
9791 Register dest,
9792 Register src,
9793 Register count,
9794 Register scratch,
9795 bool ascii) {
Steve Blockd0582a62009-12-15 09:54:21 +00009796 Label loop;
9797 __ bind(&loop);
9798 // This loop just copies one character at a time, as it is only used for very
9799 // short strings.
9800 if (ascii) {
9801 __ mov_b(scratch, Operand(src, 0));
9802 __ mov_b(Operand(dest, 0), scratch);
9803 __ add(Operand(src), Immediate(1));
9804 __ add(Operand(dest), Immediate(1));
9805 } else {
9806 __ mov_w(scratch, Operand(src, 0));
9807 __ mov_w(Operand(dest, 0), scratch);
9808 __ add(Operand(src), Immediate(2));
9809 __ add(Operand(dest), Immediate(2));
9810 }
9811 __ sub(Operand(count), Immediate(1));
9812 __ j(not_zero, &loop);
9813}
9814
9815
Leon Clarkee46be812010-01-19 14:06:41 +00009816void StringStubBase::GenerateCopyCharactersREP(MacroAssembler* masm,
9817 Register dest,
9818 Register src,
9819 Register count,
9820 Register scratch,
9821 bool ascii) {
9822 // Copy characters using rep movs of doublewords. Align destination on 4 byte
9823 // boundary before starting rep movs. Copy remaining characters after running
9824 // rep movs.
9825 ASSERT(dest.is(edi)); // rep movs destination
9826 ASSERT(src.is(esi)); // rep movs source
9827 ASSERT(count.is(ecx)); // rep movs count
9828 ASSERT(!scratch.is(dest));
9829 ASSERT(!scratch.is(src));
9830 ASSERT(!scratch.is(count));
9831
9832 // Nothing to do for zero characters.
9833 Label done;
9834 __ test(count, Operand(count));
9835 __ j(zero, &done);
9836
9837 // Make count the number of bytes to copy.
9838 if (!ascii) {
9839 __ shl(count, 1);
9840 }
9841
9842 // Don't enter the rep movs if there are less than 4 bytes to copy.
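  // count & ~3 is zero exactly when fewer than four bytes remain.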
9843 Label last_bytes;
9844 __ test(count, Immediate(~3));
9845 __ j(zero, &last_bytes);
9846
9847 // Copy from esi to edi using the rep movs instruction.
9848 __ mov(scratch, count);
9849 __ sar(count, 2); // Number of doublewords to copy.
9850 __ rep_movs();
9851
9852 // Find number of bytes left.
9853 __ mov(count, scratch);
9854 __ and_(count, 3);
9855
9856 // Check if there are more bytes to copy.
9857 __ bind(&last_bytes);
9858 __ test(count, Operand(count));
9859 __ j(zero, &done);
9860
9861 // Copy remaining characters.
9862 Label loop;
9863 __ bind(&loop);
9864 __ mov_b(scratch, Operand(src, 0));
9865 __ mov_b(Operand(dest, 0), scratch);
9866 __ add(Operand(src), Immediate(1));
9867 __ add(Operand(dest), Immediate(1));
9868 __ sub(Operand(count), Immediate(1));
9869 __ j(not_zero, &loop);
9870
9871 __ bind(&done);
9872}
9873
9874
9875void SubStringStub::Generate(MacroAssembler* masm) {
9876 Label runtime;
9877
9878 // Stack frame on entry.
9879 // esp[0]: return address
9880 // esp[4]: to
9881 // esp[8]: from
9882 // esp[12]: string
9883
9884 // Make sure first argument is a string.
9885 __ mov(eax, Operand(esp, 3 * kPointerSize));
9886 ASSERT_EQ(0, kSmiTag);
9887 __ test(eax, Immediate(kSmiTagMask));
9888 __ j(zero, &runtime);
9889 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
9890 __ j(NegateCondition(is_string), &runtime);
9891
9892 // eax: string
9893 // ebx: instance type
9894 // Calculate length of sub string using the smi values.
9895 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // to
9896 __ test(ecx, Immediate(kSmiTagMask));
9897 __ j(not_zero, &runtime);
9898 __ mov(edx, Operand(esp, 2 * kPointerSize)); // from
9899 __ test(edx, Immediate(kSmiTagMask));
9900 __ j(not_zero, &runtime);
9901 __ sub(ecx, Operand(edx));
9902 // Handle sub-strings of length 2 and less in the runtime system.
9903 __ SmiUntag(ecx); // Result length is no longer smi.
9904 __ cmp(ecx, 2);
9905 __ j(below_equal, &runtime);
9906
9907 // eax: string
9908 // ebx: instance type
9909 // ecx: result string length
9910 // Check for flat ascii string
9911 Label non_ascii_flat;
9912 __ and_(ebx, kStringRepresentationMask | kStringEncodingMask);
9913 __ cmp(ebx, kSeqStringTag | kAsciiStringTag);
9914 __ j(not_equal, &non_ascii_flat);
9915
9916 // Allocate the result.
9917 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
9918
9919 // eax: result string
9920 // ecx: result string length
9921 __ mov(edx, esi); // esi used by following code.
9922 // Locate first character of result.
9923 __ mov(edi, eax);
9924 __ add(Operand(edi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9925 // Load string argument and locate character of sub string start.
9926 __ mov(esi, Operand(esp, 3 * kPointerSize));
9927 __ add(Operand(esi), Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
9928 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
9929 __ SmiUntag(ebx);
9930 __ add(esi, Operand(ebx));
9931
9932 // eax: result string
9933 // ecx: result length
9934 // edx: original value of esi
9935 // edi: first character of result
9936 // esi: character of sub string start
9937 GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
9938 __ mov(esi, edx); // Restore esi.
9939 __ IncrementCounter(&Counters::sub_string_native, 1);
9940 __ ret(3 * kPointerSize);
9941
9942 __ bind(&non_ascii_flat);
9943 // eax: string
9944 // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
9945 // ecx: result string length
9946 // Check for flat two byte string
9947 __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
9948 __ j(not_equal, &runtime);
9949
9950 // Allocate the result.
9951 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);
9952
9953 // eax: result string
9954 // ecx: result string length
9955 __ mov(edx, esi); // esi used by following code.
9956 // Locate first character of result.
9957 __ mov(edi, eax);
9958 __ add(Operand(edi),
9959 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
9960 // Load string argument and locate character of sub string start.
9961 __ mov(esi, Operand(esp, 3 * kPointerSize));
Andrei Popescu31002712010-02-23 13:46:05 +00009962 __ add(Operand(esi),
9963 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
Leon Clarkee46be812010-01-19 14:06:41 +00009964 __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
9965 // As from is a smi it is 2 times the value which matches the size of a two
9966 // byte character.
9967 ASSERT_EQ(0, kSmiTag);
9968 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
9969 __ add(esi, Operand(ebx));
9970
9971 // eax: result string
9972 // ecx: result length
9973 // edx: original value of esi
9974 // edi: first character of result
9975 // esi: character of sub string start
9976 GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
9977 __ mov(esi, edx); // Restore esi.
9978 __ IncrementCounter(&Counters::sub_string_native, 1);
9979 __ ret(3 * kPointerSize);
9980
9981 // Just jump to runtime to create the sub string.
9982 __ bind(&runtime);
9983 __ TailCallRuntime(ExternalReference(Runtime::kSubString), 3, 1);
9984}
9985
9986
9987void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
9988 Register left,
9989 Register right,
9990 Register scratch1,
9991 Register scratch2,
9992 Register scratch3) {
9993 Label result_not_equal;
9994 Label result_greater;
9995 Label compare_lengths;
9996 // Find minimum length.
9997 Label left_shorter;
9998 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
9999 __ mov(scratch3, scratch1);
10000 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
10001
10002 Register length_delta = scratch3;
10003
10004 __ j(less_equal, &left_shorter);
10005 // Right string is shorter. Change scratch1 to be length of right string.
10006 __ sub(scratch1, Operand(length_delta));
10007 __ bind(&left_shorter);
10008
10009 Register min_length = scratch1;
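// Illustrative example: for a left string of length 7 and a right string of
// length 4, length_delta is 3 (positive, so the branch above is not taken)
// and scratch1 is reduced from 7 to the minimum length 4.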
10010
10011 // If either length is zero, just compare lengths.
10012 __ test(min_length, Operand(min_length));
10013 __ j(zero, &compare_lengths);
10014
10015 // Change the index to run from -min_length to -1 by adding min_length
10016 // to the string start. This means the loop ends when the index reaches
10017 // zero, so no additional compare is needed (see the sketch after the
10018 // compare loop below).
10018 __ lea(left,
10019 FieldOperand(left,
10020 min_length, times_1,
10021 SeqAsciiString::kHeaderSize));
10022 __ lea(right,
10023 FieldOperand(right,
10024 min_length, times_1,
10025 SeqAsciiString::kHeaderSize));
10026 __ neg(min_length);
10027
10028 Register index = min_length; // index = -min_length;
10029
10030 {
10031 // Compare loop.
10032 Label loop;
10033 __ bind(&loop);
10034 // Compare characters.
10035 __ mov_b(scratch2, Operand(left, index, times_1, 0));
10036 __ cmpb(scratch2, Operand(right, index, times_1, 0));
10037 __ j(not_equal, &result_not_equal);
10038 __ add(Operand(index), Immediate(1));
10039 __ j(not_zero, &loop);
10040 }
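// The loop above is equivalent to the following sketch (hypothetical names,
// for illustration only):
//
//   const char* left_end = left_chars + min_length;
//   const char* right_end = right_chars + min_length;
//   for (int index = -min_length; index != 0; index++) {
//     if (left_end[index] != right_end[index]) goto result_not_equal;
//   }
//
// Running the index from -min_length up to zero lets the increment's zero
// flag terminate the loop without a separate bounds compare.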
10041
10042 // Compare lengths - strings up to min-length are equal.
10043 __ bind(&compare_lengths);
10044 __ test(length_delta, Operand(length_delta));
10045 __ j(not_zero, &result_not_equal);
10046
10047 // Result is EQUAL.
10048 ASSERT_EQ(0, EQUAL);
10049 ASSERT_EQ(0, kSmiTag);
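// Because kSmiTag and EQUAL are both zero, Smi::FromInt(EQUAL) is simply the
// machine value 0; the asserts above document that assumption.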
10050 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
10051 __ ret(2 * kPointerSize);
10052
10053 __ bind(&result_not_equal);
10054 __ j(greater, &result_greater);
10055
10056 // Result is LESS.
10057 __ Set(eax, Immediate(Smi::FromInt(LESS)));
10058 __ ret(2 * kPointerSize);
10059
10060 // Result is GREATER.
10061 __ bind(&result_greater);
10062 __ Set(eax, Immediate(Smi::FromInt(GREATER)));
10063 __ ret(2 * kPointerSize);
10064}
10065
10066
10067void StringCompareStub::Generate(MacroAssembler* masm) {
10068 Label runtime;
10069
10070 // Stack frame on entry.
10071 // esp[0]: return address
10072 // esp[4]: right string
10073 // esp[8]: left string
10074
10075 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
10076 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
10077
10078 Label not_same;
10079 __ cmp(edx, Operand(eax));
10080 __ j(not_equal, &not_same);
10081 ASSERT_EQ(0, EQUAL);
10082 ASSERT_EQ(0, kSmiTag);
10083 __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
10084 __ IncrementCounter(&Counters::string_compare_native, 1);
10085 __ ret(2 * kPointerSize);
10086
10087 __ bind(&not_same);
10088
10089 // Check that both objects are sequential ascii strings.
10090 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
10091
10092 // Compare flat ascii strings.
10093 __ IncrementCounter(&Counters::string_compare_native, 1);
10094 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
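// GenerateCompareFlatAsciiStrings ends every path with a ret, so the code
// below is only reached through the earlier jump to the runtime label.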
10095
10096 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
10097 // tagged as a small integer.
10098 __ bind(&runtime);
10099 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1);
10100}
10101
10102 #undef __
10103
10104} } // namespace v8::internal