// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "jump-target-light-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "stub-cache.h"
#include "virtual-frame-inl.h"
#include "virtual-frame-arm-inl.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  // On ARM you either have a completely spilled frame or you
  // handle it yourself, but at the moment there's no automation
  // of registers and deferred code.
}


void DeferredCode::RestoreRegisters() {
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->frame()->AssertIsSpilled();
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
}


void ICRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void ICRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      previous_(owner->state()) {
  owner->set_state(this);
}


ConditionCodeGenState::ConditionCodeGenState(CodeGenerator* owner,
                                             JumpTarget* true_target,
                                             JumpTarget* false_target)
    : CodeGenState(owner),
      true_target_(true_target),
      false_target_(false_target) {
  owner->set_state(this);
}


TypeInfoCodeGenState::TypeInfoCodeGenState(CodeGenerator* owner,
                                           Slot* slot,
                                           TypeInfo type_info)
    : CodeGenState(owner),
      slot_(slot) {
  owner->set_state(this);
  old_type_info_ = owner->set_type_info(slot, type_info);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


TypeInfoCodeGenState::~TypeInfoCodeGenState() {
  owner()->set_type_info(slot_, old_type_info_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation

int CodeGenerator::inlined_write_barrier_size_ = -1;

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(al),
      state_(NULL),
      loop_nesting_(0),
      type_info_(NULL),
      function_return_(JumpTarget::BIDIRECTIONAL),
      function_return_is_shadowed_(false) {
}


// Calling conventions:
// fp: caller's frame pointer
// sp: stack pointer
// r1: called JS function
// cp: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;

  int slots = scope()->num_parameters() + scope()->num_stack_slots();
  ScopedVector<TypeInfo> type_info_array(slots);
  type_info_ = &type_info_array;

  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  cc_reg_ = al;

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments
    // lr: return address
    // fp: caller's frame pointer
    // sp: stack pointer
    // r1: called JS function
    // cp: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ stop("stop-at");
    }
#endif

    frame_->Enter();
    // tos: code slot

    // Allocate space for locals and initialize them.  This also checks
    // for stack overflow.
    frame_->AllocateStackSlots();

    frame_->AssertIsSpilled();
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      // Allocate local context.
      // Get outer context and create a new context based on it.
      __ ldr(r0, frame_->Function());
      frame_->EmitPush(r0);
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        frame_->CallStub(&stub, 1);
      } else {
        frame_->CallRuntime(Runtime::kNewContext, 1);
      }

#ifdef DEBUG
      JumpTarget verified_true;
      __ cmp(r0, cp);
      verified_true.Branch(eq);
      __ stop("NewContext: r0 is expected to be the same as cp");
      verified_true.Bind();
#endif
      // Update context local.
      __ str(cp, frame_->Context());
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      frame_->AssertIsSpilled();
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          ASSERT(!scope()->is_global_scope());  // No params in global scope.
          __ ldr(r1, frame_->ParameterAt(i));
          // Loads r2 with context; used below in RecordWrite.
          __ str(r1, SlotOperand(slot, r2));
          // Load the offset into r3.
          int slot_offset =
              FixedArray::kHeaderSize + slot->index() * kPointerSize;
          __ RecordWrite(r2, Operand(slot_offset), r3, r1);
        }
      }
    }

    // Store the arguments object.  This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.SetExpectedHeight();
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());
    }
  }

  // Handle the return from the function.
  if (has_valid_frame()) {
    // If there is a valid frame, control flow can fall off the end of
    // the body.  In that case there is an implicit return statement.
    ASSERT(!function_return_is_shadowed_);
    frame_->PrepareForReturn();
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    if (function_return_.is_bound()) {
      function_return_.Jump();
    } else {
      function_return_.Bind();
      GenerateReturnSequence();
    }
  } else if (function_return_.is_linked()) {
    // If the return target has dangling jumps to it, then we have not
    // yet generated the return sequence.  This can happen when (a)
    // control does not flow off the end of the body so we did not
    // compile an artificial return statement just above, and (b) there
    // are return statements in the body but (c) they are all shadowed.
    function_return_.Bind();
    GenerateReturnSequence();
  }

  // Adjust for function-level loop nesting.
  ASSERT(loop_nesting_ == (info->is_in_loop() ? 1 : 0));
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(!has_cc());
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    ProcessDeferred();
  }

  allocator_ = NULL;
  type_info_ = NULL;
}


int CodeGenerator::NumberOfSlot(Slot* slot) {
  if (slot == NULL) return kInvalidSlotNumber;
  switch (slot->type()) {
    case Slot::PARAMETER:
      return slot->index();
    case Slot::LOCAL:
      return slot->index() + scope()->num_parameters();
    default:
      break;
  }
  return kInvalidSlotNumber;
}
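
// For example (illustrative only): in function f(a, b) { var x; }, the
// parameter slots for a and b map to 0 and 1, and the stack local x maps
// to 2 (its local index 0 plus the two parameters).  Slots of any other
// type map to kInvalidSlotNumber.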


MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference be-
      // cause the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}


MemOperand CodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Register tmp,
    Register tmp2,
    JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
  }
  // Check that last extension is NULL.
  __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(tmp2, tmp2);
  slow->Branch(ne);
  __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp, slot->index());
}
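
// A note on the extension checks above: a context whose scope calls eval
// may carry a non-NULL extension object holding dynamically introduced
// bindings, which could shadow the statically resolved slot.  Any such
// frame therefore bails out to the slow path rather than returning a
// direct context operand.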


// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { ConditionCodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::Load(Expression* expr) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator.  A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  frame_->AssertIsNotSpilled();
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  JumpTarget true_target;
  JumpTarget false_target;
  LoadCondition(expr, &true_target, &false_target, false);

  if (has_cc()) {
    // Convert cc_reg_ into a boolean value.
    JumpTarget loaded;
    JumpTarget materialize_true;
    materialize_true.Branch(cc_reg_);
    frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    loaded.Jump();
    materialize_true.Bind();
    frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    loaded.Bind();
    cc_reg_ = al;
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    // We have at least one condition value that has been "translated"
    // into a branch, thus it needs to be loaded explicitly.
    JumpTarget loaded;
    if (frame_ != NULL) {
      loaded.Jump();  // Don't lose the current TOS.
    }
    bool both = true_target.is_linked() && false_target.is_linked();
    // Load "true" if necessary.
    if (true_target.is_linked()) {
      true_target.Bind();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }
    // If both "true" and "false" need to be loaded jump across the code for
    // "false".
    if (both) {
      loaded.Jump();
    }
    // Load "false" if necessary.
    if (false_target.is_linked()) {
      false_target.Bind();
      frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    }
    // A value is loaded on all paths reaching this point.
    loaded.Bind();
  }
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadGlobal() {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, GlobalObjectOperand());
  frame_->EmitPush(reg);
}


void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(reg,
         FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(reg);
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


void CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
  } else {
    frame_->SpillAll();
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ ldr(r2, frame_->Function());
    // The receiver is below the arguments, the return address, and the
    // frame pointer on the stack.
    const int kReceiverDisplacement = 2 + scope()->num_parameters();
    __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
    __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
    frame_->Adjust(3);
    __ Push(r2, r1, r0);
    frame_->CallStub(&stub, 3);
    frame_->EmitPush(r0);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
  JumpTarget done;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
    Register arguments = frame_->PopToRegister();
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(arguments, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
}
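
// A sketch of the frame layout assumed by kReceiverDisplacement above
// (the standard V8 ARM frame: the caller pushes the receiver and then the
// arguments, and the call saves the return address and the caller's fp):
//
//   [fp + 0]                  caller's frame pointer
//   [fp + 1 * kPointerSize]   return address
//   [fp + 2 * kPointerSize]   parameters (num_parameters slots)
//   [fp + (2 + num_parameters) * kPointerSize]  receiver
//
// hence receiver address = fp + kReceiverDisplacement * kPointerSize.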


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator.  A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  cgen->frame()->AssertIsNotSpilled();
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}


void CodeGenerator::UnloadReference(Reference* ref) {
  int size = ref->size();
  ref->set_unloaded();
  if (size == 0) return;

  // Pop a reference from the stack while preserving TOS.
  VirtualFrame::RegisterAllocationScope scope(this);
  Comment cmnt(masm_, "[ UnloadReference");
  if (size > 0) {
    Register tos = frame_->PopToRegister();
    frame_->Drop(size);
    frame_->EmitPush(tos);
  }
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
// register to a boolean in the condition code register. The code
// may jump to 'false_target' in case the register converts to 'false'.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  bool known_smi = frame_->KnownSmiAt(0);
  Register tos = frame_->PopToRegister();

  // Fast case checks

  // Check if the value is 'false'.
  if (!known_smi) {
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);

    // Check if the value is 'true'.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(tos, ip);
    true_target->Branch(eq);

    // Check if the value is 'undefined'.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);
  }

  // Check if the value is a smi.
  __ cmp(tos, Operand(Smi::FromInt(0)));

  if (!known_smi) {
    false_target->Branch(eq);
    __ tst(tos, Operand(kSmiTagMask));
    true_target->Branch(eq);

    // Slow case.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Implements the slow case by using ToBooleanStub.
      // The ToBooleanStub takes a single argument, and
      // returns a non-zero value for true, or zero for false.
      // Both the argument value and the return value use the
      // register assigned to tos.
      ToBooleanStub stub(tos);
      frame_->CallStub(&stub, 0);
      // Convert the result in "tos" to a condition code.
      __ cmp(tos, Operand(0, RelocInfo::NONE));
    } else {
      // Implements slow case by calling the runtime.
      frame_->EmitPush(tos);
      frame_->CallRuntime(Runtime::kToBool, 1);
      // Convert the result (r0) to a condition code.
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(r0, ip);
    }
  }

  cc_reg_ = ne;
}
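
// A note on the smi fast path in ToBoolean above: with kSmiTag == 0, the
// smi 0 is represented by the machine word 0, so a single
//   cmp tos, #Smi::FromInt(0)
// sets eq exactly for the (falsy) smi zero.  When the value is not
// statically known to be a smi, the following tst against kSmiTagMask
// separates the remaining smis (non-zero, hence truthy) from heap
// objects, which fall through to the slow case.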


void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           OverwriteMode overwrite_mode,
                                           GenerateInlineSmi inline_smi,
                                           int constant_rhs) {
  // top of virtual frame: y
  // 2nd elt. on virtual frame : x
  // result : top of virtual frame

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:
    case Token::SUB:
      if (inline_smi) {
        JumpTarget done;
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register scratch = VirtualFrame::scratch0();
        __ orr(scratch, rhs, Operand(lhs));
        // Check they are both small and positive.
        __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        STATIC_ASSERT(kSmiTag == 0);
        if (op == Token::ADD) {
          __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
        } else {
          __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
        }
        done.Branch(eq);
        GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
        frame_->SpillAll();
        frame_->CallStub(&stub, 0);
        done.Bind();
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
      if (inline_smi) {
        bool rhs_is_smi = frame_->KnownSmiAt(0);
        bool lhs_is_smi = frame_->KnownSmiAt(1);
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register smi_test_reg;
        Condition cond;
        if (!rhs_is_smi || !lhs_is_smi) {
          if (rhs_is_smi) {
            smi_test_reg = lhs;
          } else if (lhs_is_smi) {
            smi_test_reg = rhs;
          } else {
            smi_test_reg = VirtualFrame::scratch0();
            __ orr(smi_test_reg, rhs, Operand(lhs));
          }
          // Check they are both Smis.
          __ tst(smi_test_reg, Operand(kSmiTagMask));
          cond = eq;
        } else {
          cond = al;
        }
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        if (op == Token::BIT_OR) {
          __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else if (op == Token::BIT_AND) {
          __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else {
          ASSERT(op == Token::BIT_XOR);
          STATIC_ASSERT(kSmiTag == 0);
          __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
        }
        if (cond != al) {
          JumpTarget done;
          done.Branch(cond);
          GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
          frame_->SpillAll();
          frame_->CallStub(&stub, 0);
          done.Bind();
        }
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      Register rhs = frame_->PopToRegister();
      Register lhs = frame_->PopToRegister(rhs);  // Don't pop to rhs register.
      GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
      frame_->SpillAll();
      frame_->CallStub(&stub, 0);
      frame_->EmitPush(r0);
      break;
    }

    case Token::COMMA: {
      Register scratch = frame_->PopToRegister();
      // Simply discard left value.
      frame_->Drop();
      frame_->EmitPush(scratch);
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}
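
// A worked example of the tagged-smi arithmetic used above (a sketch; on
// 32-bit ARM kSmiTag == 0 and kSmiTagSize == 1, so a smi v is encoded as
// the word 2*v):
//
//   a = 3 is tagged as  6 (0b0110)
//   b = 5 is tagged as 10 (0b1010)
//   add r0, lhs, rhs  ->  16, the tagged encoding of 8.
//
// Since 2*a + 2*b == 2*(a + b), the sum can be computed directly on the
// tagged words; the tst against kSmiTagMask | 0xc0000000 above ensures
// both operands are small positive smis, so the inlined add cannot
// overflow the 31-bit payload.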


class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             int value,
                             bool reversed,
                             OverwriteMode overwrite_mode,
                             Register tos)
      : op_(op),
        value_(value),
        reversed_(reversed),
        overwrite_mode_(overwrite_mode),
        tos_register_(tos) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();
  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit(). Currently on ARM SaveRegisters() and RestoreRegisters() are empty
  // methods, it is the responsibility of the deferred code to save and restore
  // registers.
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToNonSmiInput(Condition cond);
  void JumpToAnswerOutOfRange(Condition cond);

 private:
  void GenerateNonSmiInput();
  void GenerateAnswerOutOfRange();
  void WriteNonSmiAnswer(Register answer,
                         Register heap_number,
                         Register scratch);

  Token::Value op_;
  int value_;
  bool reversed_;
  OverwriteMode overwrite_mode_;
  Register tos_register_;
  Label non_smi_input_;
  Label answer_out_of_range_;
};


// For bit operations we try harder and handle the case where the input is not
// a Smi but a 32bits integer without calling the generic stub.
void DeferredInlineSmiOperation::JumpToNonSmiInput(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  __ b(cond, &non_smi_input_);
}


// For bit operations the result is always 32bits so we handle the case where
// the result does not fit in a Smi without calling the generic stub.
void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  if ((op_ == Token::SHR) && !CpuFeatures::IsSupported(VFP3)) {
    // >>> requires an unsigned to double conversion and the non VFP code
    // does not support this conversion.
    __ b(cond, entry_label());
  } else {
    __ b(cond, &answer_out_of_range_);
  }
}


// On entry the non-constant side of the binary operation is in tos_register_
// and the constant smi side is nowhere. The tos_register_ is not used by the
// virtual frame. On exit the answer is in the tos_register_ and the virtual
// frame is unchanged.
void DeferredInlineSmiOperation::Generate() {
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register lhs = r1;
  Register rhs = r0;
  switch (op_) {
    case Token::ADD: {
      // Revert optimistic add.
      if (reversed_) {
        __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      // Revert optimistic sub.
      if (reversed_) {
        __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    // For these operations there is no optimistic operation that needs to be
    // reverted.
    case Token::MUL:
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (tos_register_.is(r1)) {
        __ mov(r0, Operand(Smi::FromInt(value_)));
      } else {
        ASSERT(tos_register_.is(r0));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      }
      if (reversed_ == tos_register_.is(r1)) {
        lhs = r0;
        rhs = r1;
      }
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
  __ CallStub(&stub);

  // The generic stub returns its value in r0, but that's not
  // necessarily what we want.  We want whatever the inlined code
  // expected, which is that the answer is in the same register as
  // the operand was.
  __ Move(tos_register_, r0);

  // The tos register was not in use for the virtual frame that we
  // came into this function with, so we can merge back to that frame
  // without trashing it.
  copied_frame.MergeTo(frame_state()->frame());

  Exit();

  if (non_smi_input_.is_linked()) {
    GenerateNonSmiInput();
  }

  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


// Convert and write the integer answer into heap_number.
void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                   Register heap_number,
                                                   Register scratch) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, answer);
    if (op_ == Token::SHR) {
      __ vcvt_f64_u32(d0, s0);
    } else {
      __ vcvt_f64_s32(d0, s0);
    }
    __ sub(scratch, heap_number, Operand(kHeapObjectTag));
    __ vstr(d0, scratch, HeapNumber::kValueOffset);
  } else {
    WriteInt32ToHeapNumberStub stub(answer, heap_number, scratch);
    __ CallStub(&stub);
  }
}


void DeferredInlineSmiOperation::GenerateNonSmiInput() {
  // We know the left hand side is not a Smi and the right hand side is an
  // immediate value (value_) which can be represented as a Smi. We only
  // handle bit operations.
  ASSERT(Token::IsBitOp(op_));

  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&non_smi_input_);
  if (FLAG_debug_code) {
    __ AbortIfSmi(tos_register_);
  }

  // This routine uses the registers from r2 to r6.  At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  Register heap_number_map = r7;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ ldr(r3, FieldMemOperand(tos_register_, HeapNumber::kMapOffset));
  __ cmp(r3, heap_number_map);
  // Not a number, fall back to the GenericBinaryOpStub.
  __ b(ne, entry_label());

  Register int32 = r2;
  // Not a 32bits signed int, fall back to the GenericBinaryOpStub.
  __ ConvertToInt32(tos_register_, int32, r4, r5, entry_label());

  // tos_register_ (r0 or r1): Original heap number.
  // int32: signed 32bits int.

  Label result_not_a_smi;
  int shift_value = value_ & 0x1f;
  switch (op_) {
    case Token::BIT_OR:  __ orr(int32, int32, Operand(value_)); break;
    case Token::BIT_XOR: __ eor(int32, int32, Operand(value_)); break;
    case Token::BIT_AND: __ and_(int32, int32, Operand(value_)); break;
    case Token::SAR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, ASR, shift_value));
      }
      break;
    case Token::SHR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSR, shift_value), SetCC);
      } else {
        // SHR is special because it is required to produce a positive answer.
        __ cmp(int32, Operand(0, RelocInfo::NONE));
      }
      if (CpuFeatures::IsSupported(VFP3)) {
        __ b(mi, &result_not_a_smi);
      } else {
        // Non VFP code cannot convert from unsigned to double, so fall back
        // to GenericBinaryOpStub.
        __ b(mi, entry_label());
      }
      break;
    case Token::SHL:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSL, shift_value));
      }
      break;
    default: UNREACHABLE();
  }
  // Check that the *signed* result fits in a smi.  Not necessary for AND, SAR
  // if the shift is more than 0 or SHR if the shift is more than 1.
  if (!( (op_ == Token::AND) ||
        ((op_ == Token::SAR) && (shift_value > 0)) ||
        ((op_ == Token::SHR) && (shift_value > 1)))) {
    __ add(r3, int32, Operand(0x40000000), SetCC);
    __ b(mi, &result_not_a_smi);
  }
  __ mov(tos_register_, Operand(int32, LSL, kSmiTagSize));
  Exit();

  if (result_not_a_smi.is_linked()) {
    __ bind(&result_not_a_smi);
    if (overwrite_mode_ != OVERWRITE_LEFT) {
      ASSERT((overwrite_mode_ == NO_OVERWRITE) ||
             (overwrite_mode_ == OVERWRITE_RIGHT));
      // If the allocation fails, fall back to the GenericBinaryOpStub.
      __ AllocateHeapNumber(r4, r5, r6, heap_number_map, entry_label());
      // Nothing can go wrong now, so overwrite tos.
      __ mov(tos_register_, Operand(r4));
    }

    // int32: answer as signed 32bits integer.
    // tos_register_: Heap number to write the answer into.
    WriteNonSmiAnswer(int32, tos_register_, r3);

    Exit();
  }
}


void DeferredInlineSmiOperation::GenerateAnswerOutOfRange() {
  // The inputs to a bitwise operation were Smis but the result cannot fit
  // into a Smi, so we store it into a heap number. VirtualFrame::scratch0()
  // holds the untagged result to be converted.  tos_register_ contains the
  // input.  See the calls to JumpToAnswerOutOfRange to see how we got here.
  ASSERT(Token::IsBitOp(op_));
  ASSERT(!reversed_);

  Register untagged_result = VirtualFrame::scratch0();

  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&answer_out_of_range_);
  if (((value_ & 0x1f) == 0) && (op_ == Token::SHR)) {
    // >>> 0 is a special case where the untagged_result register is not set up
    // yet.  We untag the input to get it.
    __ mov(untagged_result, Operand(tos_register_, ASR, kSmiTagSize));
  }

  // This routine uses the registers from r2 to r6.  At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  // Allocate the result heap number.
  Register heap_number_map = VirtualFrame::scratch1();
  Register heap_number = r4;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  // If the allocation fails, fall back to the GenericBinaryOpStub.
  __ AllocateHeapNumber(heap_number, r5, r6, heap_number_map, entry_label());
  WriteNonSmiAnswer(untagged_result, heap_number, r3);
  __ mov(tos_register_, Operand(heap_number));

  Exit();
}


static bool PopCountLessThanEqual2(unsigned int x) {
  x &= x - 1;
  return (x & (x - 1)) == 0;
}
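
// For example, x = 40 (0b101000) has two bits set: x &= x - 1 clears the
// lowest set bit, leaving 0b100000, and a power of two (or zero) is
// exactly what the final (x & (x - 1)) == 0 test accepts.  A value with
// three or more bits set survives both clears and fails the test.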


// Returns the index of the lowest bit set.
static int BitPosition(unsigned x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}
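
// For example, BitPosition(40) == 3: 40 is 0b101000, its low nibble is
// non-zero, so the nibble loop is skipped and the bit loop shifts three
// times (20, 10, 5) before finding a set low bit.  BitPosition(48) == 4:
// the low nibble of 0b110000 is zero, so the first loop skips four bits
// at once.  The argument must be non-zero or the loops never terminate.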


// Can we multiply by x with at most two shifts and an add.
// This answers yes to all integers from 2 to 10.
static bool IsEasyToMultiplyBy(int x) {
  if (x < 2) return false;  // Avoid special cases.
  if (x > (Smi::kMaxValue + 1) >> 2) return false;  // Almost always overflows.
  if (IsPowerOf2(x)) return true;  // Simple shift.
  if (PopCountLessThanEqual2(x)) return true;  // Shift and add and shift.
  if (IsPowerOf2(x + 1)) return true;  // Patterns like 11111.
  return false;
}
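
// For example: 8 is a power of two (a single shift); 10 == 0b1010 has two
// bits set (shift-add, then shift); 7 == 0b111 matches the 2^n - 1
// pattern (one rsb with a shifted operand); 11 == 0b1011 has three bits
// set and is not 2^n - 1, so it is not "easy" and multiplication by 11
// goes through the generic stub instead.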


// Can multiply by anything that IsEasyToMultiplyBy returns true for.
// Source and destination may be the same register.  This routine does
// not set carry and overflow the way a mul instruction would.
static void InlineMultiplyByKnownInt(MacroAssembler* masm,
                                     Register source,
                                     Register destination,
                                     int known_int) {
  if (IsPowerOf2(known_int)) {
    masm->mov(destination, Operand(source, LSL, BitPosition(known_int)));
  } else if (PopCountLessThanEqual2(known_int)) {
    int first_bit = BitPosition(known_int);
    int second_bit = BitPosition(known_int ^ (1 << first_bit));
    masm->add(destination, source,
              Operand(source, LSL, second_bit - first_bit));
    if (first_bit != 0) {
      masm->mov(destination, Operand(destination, LSL, first_bit));
    }
  } else {
    ASSERT(IsPowerOf2(known_int + 1));  // Patterns like 1111.
    int the_bit = BitPosition(known_int + 1);
    masm->rsb(destination, source, Operand(source, LSL, the_bit));
  }
}
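
// A sketch of the code this emits (not taken from an assembler listing):
//
//   known_int == 8   ->  mov dst, src, LSL #3
//   known_int == 10  ->  add dst, src, src, LSL #2   ; dst = 5 * src
//                        mov dst, dst, LSL #1        ; dst = 10 * src
//   known_int == 7   ->  rsb dst, src, src, LSL #3   ; dst = 8 * src - src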


void CodeGenerator::SmiOperation(Token::Value op,
                                 Handle<Object> value,
                                 bool reversed,
                                 OverwriteMode mode) {
  int int_value = Smi::cast(*value)->value();

  bool both_sides_are_smi = frame_->KnownSmiAt(0);

  bool something_to_inline;
  switch (op) {
    case Token::ADD:
    case Token::SUB:
    case Token::BIT_AND:
    case Token::BIT_OR:
    case Token::BIT_XOR: {
      something_to_inline = true;
      break;
    }
    case Token::SHL: {
      something_to_inline = (both_sides_are_smi || !reversed);
      break;
    }
    case Token::SHR:
    case Token::SAR: {
      if (reversed) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MOD: {
      if (reversed || int_value < 2 || !IsPowerOf2(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MUL: {
      if (!IsEasyToMultiplyBy(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    default: {
      something_to_inline = false;
      break;
    }
  }

  if (!something_to_inline) {
    if (!reversed) {
      // Push the rhs onto the virtual frame by putting it in a TOS register.
      Register rhs = frame_->GetTOSRegister();
      __ mov(rhs, Operand(value));
      frame_->EmitPush(rhs, TypeInfo::Smi());
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, int_value);
    } else {
      // Pop the rhs, then push lhs and rhs in the right order.  Only performs
      // at most one pop, the rest takes place in TOS registers.
      Register lhs = frame_->GetTOSRegister();  // Get reg for pushing.
      Register rhs = frame_->PopToRegister(lhs);  // Don't use lhs for this.
      __ mov(lhs, Operand(value));
      frame_->EmitPush(lhs, TypeInfo::Smi());
      TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
      frame_->EmitPush(rhs, t);
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI,
                             GenericBinaryOpStub::kUnknownIntValue);
    }
    return;
  }

1353 // We move the top of the stack to a register (normally no move is involved).
1354 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00001355 switch (op) {
1356 case Token::ADD: {
1357 DeferredCode* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001358 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001359
Steve Block6ded16b2010-05-10 14:33:55 +01001360 __ add(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001361 deferred->Branch(vs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001362 if (!both_sides_are_smi) {
1363 __ tst(tos, Operand(kSmiTagMask));
1364 deferred->Branch(ne);
1365 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001366 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001367 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001368 break;
1369 }
1370
1371 case Token::SUB: {
1372 DeferredCode* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001373 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001374
1375 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01001376 __ rsb(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001377 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01001378 __ sub(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001379 }
1380 deferred->Branch(vs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001381 if (!both_sides_are_smi) {
1382 __ tst(tos, Operand(kSmiTagMask));
1383 deferred->Branch(ne);
1384 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001385 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001386 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001387 break;
1388 }
1389
1390
1391 case Token::BIT_OR:
1392 case Token::BIT_XOR:
1393 case Token::BIT_AND: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001394 if (both_sides_are_smi) {
1395 switch (op) {
1396 case Token::BIT_OR: __ orr(tos, tos, Operand(value)); break;
1397 case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001398 case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001399 default: UNREACHABLE();
1400 }
1401 frame_->EmitPush(tos, TypeInfo::Smi());
1402 } else {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001403 DeferredInlineSmiOperation* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001404 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
1405 __ tst(tos, Operand(kSmiTagMask));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001406 deferred->JumpToNonSmiInput(ne);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001407 switch (op) {
1408 case Token::BIT_OR: __ orr(tos, tos, Operand(value)); break;
1409 case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001410 case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001411 default: UNREACHABLE();
1412 }
1413 deferred->BindExit();
1414 TypeInfo result_type =
1415 (op == Token::BIT_AND) ? TypeInfo::Smi() : TypeInfo::Integer32();
1416 frame_->EmitPush(tos, result_type);
Steve Blocka7e24c12009-10-30 11:49:00 +00001417 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001418 break;
1419 }
1420
1421 case Token::SHL:
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001422 if (reversed) {
1423 ASSERT(both_sides_are_smi);
1424 int max_shift = 0;
1425 int max_result = int_value == 0 ? 1 : int_value;
1426 while (Smi::IsValid(max_result << 1)) {
1427 max_shift++;
1428 max_result <<= 1;
1429 }
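        // For example (illustrative): with int_value == 1 the loop doubles
        // max_result up to 1 << 29, giving max_shift == 29; with
        // int_value == 3 it stops one doubling earlier, at max_shift == 28.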
1430 DeferredCode* deferred =
1431 new DeferredInlineSmiOperation(op, int_value, true, mode, tos);
1432 // Mask off the last 5 bits of the shift operand (rhs). This is part
1433 // of the definition of shift in JS and we know we have a Smi so we
1434 // can safely do this. The masked version gets passed to the
1435 // deferred code, but that makes no difference.
1436 __ and_(tos, tos, Operand(Smi::FromInt(0x1f)));
1437 __ cmp(tos, Operand(Smi::FromInt(max_shift)));
1438 deferred->Branch(ge);
1439 Register scratch = VirtualFrame::scratch0();
1440 __ mov(scratch, Operand(tos, ASR, kSmiTagSize)); // Untag.
1441 __ mov(tos, Operand(Smi::FromInt(int_value))); // Load constant.
1442 __ mov(tos, Operand(tos, LSL, scratch)); // Shift constant.
1443 deferred->BindExit();
1444 TypeInfo result = TypeInfo::Integer32();
1445 frame_->EmitPush(tos, result);
1446 break;
1447 }
1448 // Fall through!
Steve Blocka7e24c12009-10-30 11:49:00 +00001449 case Token::SHR:
1450 case Token::SAR: {
Steve Block6ded16b2010-05-10 14:33:55 +01001451 ASSERT(!reversed);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001452 int shift_value = int_value & 0x1f;
Steve Block791712a2010-08-27 10:21:07 +01001453 TypeInfo result = TypeInfo::Number();
1454
1455 if (op == Token::SHR) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001456 if (shift_value > 1) {
Steve Block791712a2010-08-27 10:21:07 +01001457 result = TypeInfo::Smi();
Iain Merrick9ac36c92010-09-13 15:29:50 +01001458 } else if (shift_value > 0) {
Steve Block791712a2010-08-27 10:21:07 +01001459 result = TypeInfo::Integer32();
Iain Merrick75681382010-08-19 15:07:18 +01001460 }
Steve Block791712a2010-08-27 10:21:07 +01001461 } else if (op == Token::SAR) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001462 if (shift_value > 0) {
Steve Block791712a2010-08-27 10:21:07 +01001463 result = TypeInfo::Smi();
1464 } else {
1465 result = TypeInfo::Integer32();
1466 }
1467 } else {
1468 ASSERT(op == Token::SHL);
1469 result = TypeInfo::Integer32();
Iain Merrick75681382010-08-19 15:07:18 +01001470 }
Steve Block791712a2010-08-27 10:21:07 +01001471
Iain Merrick9ac36c92010-09-13 15:29:50 +01001472 DeferredInlineSmiOperation* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001473 new DeferredInlineSmiOperation(op, shift_value, false, mode, tos);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001474 if (!both_sides_are_smi) {
1475 __ tst(tos, Operand(kSmiTagMask));
1476 deferred->JumpToNonSmiInput(ne);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001477 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001478 switch (op) {
1479 case Token::SHL: {
1480 if (shift_value != 0) {
John Reck59135872010-11-02 12:39:01 -07001481 Register untagged_result = VirtualFrame::scratch0();
1482 Register scratch = VirtualFrame::scratch1();
Kristian Monsen25f61362010-05-21 11:50:48 +01001483 int adjusted_shift = shift_value - kSmiTagSize;
1484 ASSERT(adjusted_shift >= 0);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001485
Kristian Monsen25f61362010-05-21 11:50:48 +01001486 if (adjusted_shift != 0) {
John Reck59135872010-11-02 12:39:01 -07001487 __ mov(untagged_result, Operand(tos, LSL, adjusted_shift));
1488 } else {
1489 __ mov(untagged_result, Operand(tos));
Kristian Monsen25f61362010-05-21 11:50:48 +01001490 }
Iain Merrick9ac36c92010-09-13 15:29:50 +01001491 // Check that the *signed* result fits in a smi.
John Reck59135872010-11-02 12:39:01 -07001492 __ add(scratch, untagged_result, Operand(0x40000000), SetCC);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001493 deferred->JumpToAnswerOutOfRange(mi);
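          // For example (illustrative): adding 0x40000000 maps the valid
          // untagged smi range [-0x40000000, 0x3fffffff] onto
          // [0, 0x7fffffff], so an out-of-range result such as
          // untagged_result == 0x40000000 wraps negative and takes the mi
          // branch.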
John Reck59135872010-11-02 12:39:01 -07001494 __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001495 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001496 break;
1497 }
1498 case Token::SHR: {
Steve Blocka7e24c12009-10-30 11:49:00 +00001499 if (shift_value != 0) {
John Reck59135872010-11-02 12:39:01 -07001500 Register untagged_result = VirtualFrame::scratch0();
1501 // Remove tag.
1502 __ mov(untagged_result, Operand(tos, ASR, kSmiTagSize));
1503 __ mov(untagged_result, Operand(untagged_result, LSR, shift_value));
Kristian Monsen25f61362010-05-21 11:50:48 +01001504 if (shift_value == 1) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001505 // Check that the *unsigned* result fits in a smi.
1506 // Neither of the two high-order bits can be set:
Kristian Monsen25f61362010-05-21 11:50:48 +01001507 // - 0x80000000: high bit would be lost when smi tagging
Iain Merrick9ac36c92010-09-13 15:29:50 +01001508 // - 0x40000000: this number would convert to negative when Smi
1509 // tagging.
1510 // These two cases can only happen with shifts by 0 or 1 when
1511 // handed a valid smi.
John Reck59135872010-11-02 12:39:01 -07001512 __ tst(untagged_result, Operand(0xc0000000));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001513 deferred->JumpToAnswerOutOfRange(ne);
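            // For example (illustrative): the smi -2 untags to 0xfffffffe,
            // and -2 >>> 1 == 0x7fffffff, which has bit 30 set and so
            // cannot be represented as a smi.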
Kristian Monsen25f61362010-05-21 11:50:48 +01001514 }
John Reck59135872010-11-02 12:39:01 -07001515 __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001516 } else {
1517 __ cmp(tos, Operand(0, RelocInfo::NONE));
1518 deferred->JumpToAnswerOutOfRange(mi);
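          // For example (illustrative): a negative input must take the
          // deferred path because -1 >>> 0 == 4294967295, which is not a
          // smi.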
Steve Blocka7e24c12009-10-30 11:49:00 +00001519 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001520 break;
1521 }
1522 case Token::SAR: {
1523 if (shift_value != 0) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001524 // Do the shift and the tag removal in one operation. If the shift
Kristian Monsen25f61362010-05-21 11:50:48 +01001525 // is 31 bits (the highest possible value) then we emit the
Iain Merrick9ac36c92010-09-13 15:29:50 +01001526 // instruction as a shift by 0, which in the ARM ISA means shift
1527 // arithmetically by 32.
Kristian Monsen25f61362010-05-21 11:50:48 +01001528 __ mov(tos, Operand(tos, ASR, (kSmiTagSize + shift_value) & 0x1f));
Kristian Monsen25f61362010-05-21 11:50:48 +01001529 __ mov(tos, Operand(tos, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001530 }
1531 break;
1532 }
1533 default: UNREACHABLE();
1534 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001535 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001536 frame_->EmitPush(tos, result);
Steve Blocka7e24c12009-10-30 11:49:00 +00001537 break;
1538 }
1539
1540 case Token::MOD: {
Steve Block6ded16b2010-05-10 14:33:55 +01001541 ASSERT(!reversed);
1542 ASSERT(int_value >= 2);
1543 ASSERT(IsPowerOf2(int_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001544 DeferredCode* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001545 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001546 unsigned mask = (0x80000000u | kSmiTagMask);
Steve Block6ded16b2010-05-10 14:33:55 +01001547 __ tst(tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001548 deferred->Branch(ne); // Go to deferred code on non-Smis and negative.
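      // For example (illustrative): with int_value == 8 the mask below is
      // (8 << 1) - 1 == 0xf, so the smi 13 (tagged 26) becomes 26 & 0xf
      // == 10, the tagged representation of 13 % 8 == 5.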
1549 mask = (int_value << kSmiTagSize) - 1;
Steve Block6ded16b2010-05-10 14:33:55 +01001550 __ and_(tos, tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001551 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001552 // Mod by a positive power-of-2 smi gives a smi if the lhs is an integer.
1553 frame_->EmitPush(
1554 tos,
1555 both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Number());
Steve Blocka7e24c12009-10-30 11:49:00 +00001556 break;
1557 }
1558
1559 case Token::MUL: {
Steve Block6ded16b2010-05-10 14:33:55 +01001560 ASSERT(IsEasyToMultiplyBy(int_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001561 DeferredCode* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001562 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001563 unsigned max_smi_that_wont_overflow = Smi::kMaxValue / int_value;
1564 max_smi_that_wont_overflow <<= kSmiTagSize;
1565 unsigned mask = 0x80000000u;
1566 while ((mask & max_smi_that_wont_overflow) == 0) {
1567 mask |= mask >> 1;
1568 }
1569 mask |= kSmiTagMask;
1570 // This does a single mask test that conservatively rejects values that
1571 // are too large, as well as non-smis. It unfortunately also filters out
1572 // negative numbers, but since this code is inline we prefer brevity to
1573 // comprehensiveness.
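      // For example (illustrative): with int_value == 3 the tagged limit is
      // 0x2aaaaaaa, the loop yields mask == 0xe0000001, and the tst below
      // rejects non-smis, negative values, and tagged operands of
      // 0x20000000 and above, conservatively covering every input whose
      // product could overflow a smi.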
Steve Block6ded16b2010-05-10 14:33:55 +01001574 __ tst(tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001575 deferred->Branch(ne);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001576 InlineMultiplyByKnownInt(masm_, tos, tos, int_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00001577 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001578 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001579 break;
1580 }
1581
1582 default:
Steve Block6ded16b2010-05-10 14:33:55 +01001583 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00001584 break;
1585 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001586}
1587
1588
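// A portable model of the smi arithmetic inlined above (an illustrative
// sketch, not part of the original sources; the *Model names are
// hypothetical). It assumes 32-bit smis with kSmiTagSize == 1 and a tag
// bit of 0.
static inline bool IsSmiModel(int32_t encoded) { return (encoded & 1) == 0; }
static inline int32_t SmiTagModel(int32_t value) { return value << 1; }
static inline int32_t SmiUntagModel(int32_t encoded) { return encoded >> 1; }

// Mirrors the inlined Token::ADD case: tagged smis can be added directly,
// and a signed 32-bit overflow corresponds to the vs branch to deferred
// code, while the kSmiTagMask test catches a non-smi input.
static bool SmiAddModel(int32_t a, int32_t b, int32_t* result) {
  int64_t sum = static_cast<int64_t>(a) + b;
  if (sum != static_cast<int32_t>(sum)) return false;  // Overflow.
  *result = static_cast<int32_t>(sum);
  return IsSmiModel(*result);  // False if either input carried a tag bit.
}

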
1589void CodeGenerator::Comparison(Condition cc,
1590 Expression* left,
1591 Expression* right,
1592 bool strict) {
Steve Block6ded16b2010-05-10 14:33:55 +01001593 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00001594
Steve Block6ded16b2010-05-10 14:33:55 +01001595 if (left != NULL) Load(left);
1596 if (right != NULL) Load(right);
1597
Steve Blocka7e24c12009-10-30 11:49:00 +00001598 // sp[0] : y
1599 // sp[1] : x
1600 // result : cc register
1601
1602 // Strict only makes sense for equality comparisons.
1603 ASSERT(!strict || cc == eq);
1604
Steve Block6ded16b2010-05-10 14:33:55 +01001605 Register lhs;
1606 Register rhs;
1607
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001608 bool lhs_is_smi;
1609 bool rhs_is_smi;
1610
Steve Block6ded16b2010-05-10 14:33:55 +01001611 // We load the top two stack positions into registers chosen by the virtual
1612 // frame. This should keep the register shuffling to a minimum.
Steve Blocka7e24c12009-10-30 11:49:00 +00001613 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
1614 if (cc == gt || cc == le) {
1615 cc = ReverseCondition(cc);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001616 lhs_is_smi = frame_->KnownSmiAt(0);
1617 rhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001618 lhs = frame_->PopToRegister();
1619 rhs = frame_->PopToRegister(lhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001620 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001621 rhs_is_smi = frame_->KnownSmiAt(0);
1622 lhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001623 rhs = frame_->PopToRegister();
1624 lhs = frame_->PopToRegister(rhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001625 }
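  // For example (illustrative): for 'x > y' the frame holds y on top, so
  // the reversed path pops y first as the lhs and compares with lt;
  // 'y < x' gives the same answer as 'x > y' while keeping the operand
  // evaluation order established by the loads above.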
Steve Block6ded16b2010-05-10 14:33:55 +01001626
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001627 bool both_sides_are_smi = (lhs_is_smi && rhs_is_smi);
1628
Steve Block6ded16b2010-05-10 14:33:55 +01001629 ASSERT(rhs.is(r0) || rhs.is(r1));
1630 ASSERT(lhs.is(r0) || lhs.is(r1));
1631
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001632 JumpTarget exit;
Steve Blocka7e24c12009-10-30 11:49:00 +00001633
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001634 if (!both_sides_are_smi) {
1635 // Now we have the two sides in r0 and r1. We flush any other registers
1636 // because the stub doesn't know about register allocation.
1637 frame_->SpillAll();
1638 Register scratch = VirtualFrame::scratch0();
1639 Register smi_test_reg;
1640 if (lhs_is_smi) {
1641 smi_test_reg = rhs;
1642 } else if (rhs_is_smi) {
1643 smi_test_reg = lhs;
1644 } else {
1645 __ orr(scratch, lhs, Operand(rhs));
1646 smi_test_reg = scratch;
1647 }
1648 __ tst(smi_test_reg, Operand(kSmiTagMask));
1649 JumpTarget smi;
1650 smi.Branch(eq);
1651
1652 // Perform non-smi comparison by stub.
1653 // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
1654 // We call with 0 args because there are 0 on the stack.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001655 CompareStub stub(cc, strict, NO_SMI_COMPARE_IN_STUB, lhs, rhs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001656 frame_->CallStub(&stub, 0);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001657 __ cmp(r0, Operand(0, RelocInfo::NONE));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001658 exit.Jump();
1659
1660 smi.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001661 }
1662
Steve Blocka7e24c12009-10-30 11:49:00 +00001663 // Do smi comparisons by pointer comparison.
Steve Block6ded16b2010-05-10 14:33:55 +01001664 __ cmp(lhs, Operand(rhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00001665
1666 exit.Bind();
1667 cc_reg_ = cc;
1668}
1669
1670
Steve Blocka7e24c12009-10-30 11:49:00 +00001671// Call the function on the stack with the given arguments.
1672void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00001673 CallFunctionFlags flags,
1674 int position) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001675 // Push the arguments ("left-to-right") on the stack.
1676 int arg_count = args->length();
1677 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001678 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001679 }
1680
1681 // Record the position for debugging purposes.
1682 CodeForSourcePosition(position);
1683
1684 // Use the shared code stub to call the function.
1685 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00001686 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00001687 frame_->CallStub(&call_function, arg_count + 1);
1688
1689 // Restore context and pop function from the stack.
1690 __ ldr(cp, frame_->Context());
1691 frame_->Drop(); // discard the TOS
1692}
1693
1694
Steve Block6ded16b2010-05-10 14:33:55 +01001695void CodeGenerator::CallApplyLazy(Expression* applicand,
1696 Expression* receiver,
1697 VariableProxy* arguments,
1698 int position) {
1699 // An optimized implementation of expressions of the form
1700 // x.apply(y, arguments).
1701 // If the arguments object of the scope has not been allocated,
1702 // and x.apply is Function.prototype.apply, this optimization
1703 // just copies y and the arguments of the current function on the
1704 // stack, as receiver and arguments, and calls x.
1705 // In the implementation comments, we call x the applicand
1706 // and y the receiver.
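  // For example (illustrative), this is the pattern being optimized:
  //
  //   function outer() {
  //     return target.apply(this, arguments);
  //   }
  //
  // where outer has not materialized its arguments object.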
Steve Block6ded16b2010-05-10 14:33:55 +01001707
1708 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
1709 ASSERT(arguments->IsArguments());
1710
1711 // Load applicand.apply onto the stack. This will usually
1712 // give us a megamorphic load site. Not super, but it works.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001713 Load(applicand);
Steve Block6ded16b2010-05-10 14:33:55 +01001714 Handle<String> name = Factory::LookupAsciiSymbol("apply");
Leon Clarkef7060e22010-06-03 12:02:55 +01001715 frame_->Dup();
Steve Block6ded16b2010-05-10 14:33:55 +01001716 frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
1717 frame_->EmitPush(r0);
1718
1719 // Load the receiver and the existing arguments object onto the
1720 // expression stack. Avoid allocating the arguments object here.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001721 Load(receiver);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001722 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
Steve Block6ded16b2010-05-10 14:33:55 +01001723
Steve Block8defd9f2010-07-08 12:39:36 +01001724 // At this point the top two stack elements are probably in registers
1725 // since they were just loaded. Ensure they are in registers and get
1726 // hold of those registers.
1727 Register receiver_reg = frame_->Peek2();
1728 Register arguments_reg = frame_->Peek();
1729
1730 // From now on the frame is spilled.
1731 frame_->SpillAll();
1732
Steve Block6ded16b2010-05-10 14:33:55 +01001733 // Emit the source position information after having loaded the
1734 // receiver and the arguments.
1735 CodeForSourcePosition(position);
1736 // Contents of the stack at this point:
1737 // sp[0]: arguments object of the current function or the hole.
1738 // sp[1]: receiver
1739 // sp[2]: applicand.apply
1740 // sp[3]: applicand.
1741
1742 // Check if the arguments object has been lazily allocated
1743 // already. If so, just use that instead of copying the arguments
1744 // from the stack. This also deals with cases where a local variable
1745 // named 'arguments' has been introduced.
Steve Block8defd9f2010-07-08 12:39:36 +01001746 JumpTarget slow;
1747 Label done;
Steve Block6ded16b2010-05-10 14:33:55 +01001748 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01001749 __ cmp(ip, arguments_reg);
1750 slow.Branch(ne);
Steve Block6ded16b2010-05-10 14:33:55 +01001751
1752 Label build_args;
1753 // Get rid of the arguments object probe.
1754 frame_->Drop();
1755 // Stack now has 3 elements on it.
1756 // Contents of stack at this point:
Steve Block8defd9f2010-07-08 12:39:36 +01001757 // sp[0]: receiver - in the receiver_reg register.
Steve Block6ded16b2010-05-10 14:33:55 +01001758 // sp[1]: applicand.apply
1759 // sp[2]: applicand.
1760
1761 // Check that the receiver really is a JavaScript object.
Steve Block8defd9f2010-07-08 12:39:36 +01001762 __ BranchOnSmi(receiver_reg, &build_args);
Steve Block6ded16b2010-05-10 14:33:55 +01001763 // We allow all JSObjects including JSFunctions. As long as
1764 // JS_FUNCTION_TYPE is the last instance type and it is right
1765 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
1766 // bound.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01001767 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1768 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Steve Block8defd9f2010-07-08 12:39:36 +01001769 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
Steve Block6ded16b2010-05-10 14:33:55 +01001770 __ b(lt, &build_args);
1771
1772 // Check that applicand.apply is Function.prototype.apply.
1773 __ ldr(r0, MemOperand(sp, kPointerSize));
1774 __ BranchOnSmi(r0, &build_args);
1775 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
1776 __ b(ne, &build_args);
Steve Block6ded16b2010-05-10 14:33:55 +01001777 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Steve Block791712a2010-08-27 10:21:07 +01001778 __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
1779 __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
Steve Block6ded16b2010-05-10 14:33:55 +01001780 __ cmp(r1, Operand(apply_code));
1781 __ b(ne, &build_args);
1782
1783 // Check that applicand is a function.
1784 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
1785 __ BranchOnSmi(r1, &build_args);
1786 __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
1787 __ b(ne, &build_args);
1788
1789 // Copy the arguments to this function possibly from the
1790 // adaptor frame below it.
1791 Label invoke, adapted;
1792 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1793 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
1794 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1795 __ b(eq, &adapted);
1796
1797 // No arguments adaptor frame. Copy fixed number of arguments.
1798 __ mov(r0, Operand(scope()->num_parameters()));
1799 for (int i = 0; i < scope()->num_parameters(); i++) {
1800 __ ldr(r2, frame_->ParameterAt(i));
1801 __ push(r2);
1802 }
1803 __ jmp(&invoke);
1804
1805 // Arguments adaptor frame present. Copy arguments from there, but
1806 // limit the number copied to avoid stack overflows.
1807 __ bind(&adapted);
1808 static const uint32_t kArgumentsLimit = 1 * KB;
1809 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
1810 __ mov(r0, Operand(r0, LSR, kSmiTagSize));
1811 __ mov(r3, r0);
1812 __ cmp(r0, Operand(kArgumentsLimit));
1813 __ b(gt, &build_args);
1814
1815 // Loop through the arguments pushing them onto the execution
1816 // stack. We don't inform the virtual frame of the push, so we don't
1817 // have to worry about getting rid of the elements from the virtual
1818 // frame.
1819 Label loop;
1820 // r3 is a small non-negative integer, due to the test above.
Iain Merrick9ac36c92010-09-13 15:29:50 +01001821 __ cmp(r3, Operand(0, RelocInfo::NONE));
Steve Block6ded16b2010-05-10 14:33:55 +01001822 __ b(eq, &invoke);
1823 // Compute the address of the first argument.
1824 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
1825 __ add(r2, r2, Operand(kPointerSize));
1826 __ bind(&loop);
1827 // Post-decrement argument address by kPointerSize on each iteration.
1828 __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex));
1829 __ push(r4);
1830 __ sub(r3, r3, Operand(1), SetCC);
1831 __ b(gt, &loop);
1832
1833 // Invoke the function.
1834 __ bind(&invoke);
1835 ParameterCount actual(r0);
1836 __ InvokeFunction(r1, actual, CALL_FUNCTION);
1837 // Drop applicand.apply and applicand from the stack, and push
1838 // the result of the function call, but leave the spilled frame
1839 // unchanged, with 3 elements, so it is correct when we compile the
1840 // slow-case code.
1841 __ add(sp, sp, Operand(2 * kPointerSize));
1842 __ push(r0);
1843 // Stack now has 1 element:
1844 // sp[0]: result
1845 __ jmp(&done);
1846
1847 // Slow-case: Allocate the arguments object since we know it isn't
1848 // there, and fall-through to the slow-case where we call
1849 // applicand.apply.
1850 __ bind(&build_args);
1851 // Stack now has 3 elements, because we jumped here from a point where:
1852 // sp[0]: receiver
1853 // sp[1]: applicand.apply
1854 // sp[2]: applicand.
1855 StoreArgumentsObject(false);
1856
1857 // Stack and frame now have 4 elements.
Steve Block8defd9f2010-07-08 12:39:36 +01001858 slow.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001859
1860 // Generic computation of x.apply(y, args) with no special optimization.
1861 // Flip applicand.apply and applicand on the stack, so
1862 // applicand looks like the receiver of the applicand.apply call.
1863 // Then process it as a normal function call.
1864 __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
1865 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
Leon Clarkef7060e22010-06-03 12:02:55 +01001866 __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));
Steve Block6ded16b2010-05-10 14:33:55 +01001867
1868 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
1869 frame_->CallStub(&call_function, 3);
1870 // The function and its two arguments have been dropped.
1871 frame_->Drop(); // Drop the receiver as well.
1872 frame_->EmitPush(r0);
Ben Murdochbb769b22010-08-11 14:56:33 +01001873 frame_->SpillAll(); // The other path to the done label also has a spilled frame.
Steve Block6ded16b2010-05-10 14:33:55 +01001874 // Stack now has 1 element:
1875 // sp[0]: result
1876 __ bind(&done);
1877
1878 // Restore the context register after a call.
1879 __ ldr(cp, frame_->Context());
1880}
1881
1882
Steve Blocka7e24c12009-10-30 11:49:00 +00001883void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001884 ASSERT(has_cc());
1885 Condition cc = if_true ? cc_reg_ : NegateCondition(cc_reg_);
1886 target->Branch(cc);
1887 cc_reg_ = al;
1888}
1889
1890
1891void CodeGenerator::CheckStack() {
Steve Block8defd9f2010-07-08 12:39:36 +01001892 frame_->SpillAll();
Steve Blockd0582a62009-12-15 09:54:21 +00001893 Comment cmnt(masm_, "[ check stack");
1894 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
Steve Blockd0582a62009-12-15 09:54:21 +00001895 masm_->cmp(sp, Operand(ip));
1896 StackCheckStub stub;
1897 // Call the stub if lower.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001898 masm_->mov(ip,
Steve Blockd0582a62009-12-15 09:54:21 +00001899 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
1900 RelocInfo::CODE_TARGET),
1901 LeaveCC,
1902 lo);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001903 masm_->Call(ip, lo);
Steve Blocka7e24c12009-10-30 11:49:00 +00001904}
1905
1906
1907void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
1908#ifdef DEBUG
1909 int original_height = frame_->height();
1910#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001911 for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001912 Visit(statements->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001913 }
1914 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1915}
1916
1917
1918void CodeGenerator::VisitBlock(Block* node) {
1919#ifdef DEBUG
1920 int original_height = frame_->height();
1921#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001922 Comment cmnt(masm_, "[ Block");
1923 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01001924 node->break_target()->SetExpectedHeight();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001925 VisitStatements(node->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00001926 if (node->break_target()->is_linked()) {
1927 node->break_target()->Bind();
1928 }
1929 node->break_target()->Unuse();
1930 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1931}
1932
1933
1934void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
Steve Block3ce2e202009-11-05 08:53:23 +00001935 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001936 frame_->EmitPush(Operand(pairs));
1937 frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));
1938
Steve Blocka7e24c12009-10-30 11:49:00 +00001939 frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
1940 // The result is discarded.
1941}
1942
1943
1944void CodeGenerator::VisitDeclaration(Declaration* node) {
1945#ifdef DEBUG
1946 int original_height = frame_->height();
1947#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001948 Comment cmnt(masm_, "[ Declaration");
1949 Variable* var = node->proxy()->var();
1950 ASSERT(var != NULL); // must have been resolved
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001951 Slot* slot = var->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00001952
1953 // If it was not possible to allocate the variable at compile time,
1954 // we need to "declare" it at runtime to make sure it actually
1955 // exists in the local context.
1956 if (slot != NULL && slot->type() == Slot::LOOKUP) {
1957 // Variables with a "LOOKUP" slot were introduced as non-locals
1958 // during variable resolution and must have mode DYNAMIC.
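    // For example (illustrative), a variable introduced under eval, as in
    // eval("var x"), cannot be allocated statically and reaches this
    // runtime path.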
1959 ASSERT(var->is_dynamic());
1960 // For now, just do a runtime call.
1961 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001962 frame_->EmitPush(Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001963 // Declaration nodes are always declared in only two modes.
1964 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
1965 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
Steve Block6ded16b2010-05-10 14:33:55 +01001966 frame_->EmitPush(Operand(Smi::FromInt(attr)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001967 // Push initial value, if any.
1968 // Note: For variables we must not push an initial value (such as
1969 // 'undefined') because we may have a (legal) redeclaration and we
1970 // must not destroy the current value.
1971 if (node->mode() == Variable::CONST) {
Steve Block6ded16b2010-05-10 14:33:55 +01001972 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00001973 } else if (node->fun() != NULL) {
Steve Block6ded16b2010-05-10 14:33:55 +01001974 Load(node->fun());
Steve Blocka7e24c12009-10-30 11:49:00 +00001975 } else {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001976 frame_->EmitPush(Operand(0, RelocInfo::NONE));
Steve Blocka7e24c12009-10-30 11:49:00 +00001977 }
Steve Block6ded16b2010-05-10 14:33:55 +01001978
Steve Blocka7e24c12009-10-30 11:49:00 +00001979 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
1980 // Ignore the return value (declarations are statements).
Steve Block6ded16b2010-05-10 14:33:55 +01001981
Steve Blocka7e24c12009-10-30 11:49:00 +00001982 ASSERT(frame_->height() == original_height);
1983 return;
1984 }
1985
1986 ASSERT(!var->is_global());
1987
1988 // If we have a function or a constant, we need to initialize the variable.
1989 Expression* val = NULL;
1990 if (node->mode() == Variable::CONST) {
1991 val = new Literal(Factory::the_hole_value());
1992 } else {
1993 val = node->fun(); // NULL if we don't have a function
1994 }
1995
Steve Block8defd9f2010-07-08 12:39:36 +01001996
Steve Blocka7e24c12009-10-30 11:49:00 +00001997 if (val != NULL) {
Steve Block8defd9f2010-07-08 12:39:36 +01001998 WriteBarrierCharacter wb_info =
1999 val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
2000 if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
Steve Block6ded16b2010-05-10 14:33:55 +01002001 // Set initial value.
2002 Reference target(this, node->proxy());
2003 Load(val);
Steve Block8defd9f2010-07-08 12:39:36 +01002004 target.SetValue(NOT_CONST_INIT, wb_info);
Steve Block6ded16b2010-05-10 14:33:55 +01002005
Steve Blocka7e24c12009-10-30 11:49:00 +00002006 // Get rid of the assigned value (declarations are statements).
2007 frame_->Drop();
2008 }
2009 ASSERT(frame_->height() == original_height);
2010}
2011
2012
2013void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
2014#ifdef DEBUG
2015 int original_height = frame_->height();
2016#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002017 Comment cmnt(masm_, "[ ExpressionStatement");
2018 CodeForStatementPosition(node);
2019 Expression* expression = node->expression();
2020 expression->MarkAsStatement();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002021 Load(expression);
Steve Blocka7e24c12009-10-30 11:49:00 +00002022 frame_->Drop();
2023 ASSERT(frame_->height() == original_height);
2024}
2025
2026
2027void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
2028#ifdef DEBUG
2029 int original_height = frame_->height();
2030#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002031 Comment cmnt(masm_, "// EmptyStatement");
2032 CodeForStatementPosition(node);
2033 // nothing to do
2034 ASSERT(frame_->height() == original_height);
2035}
2036
2037
2038void CodeGenerator::VisitIfStatement(IfStatement* node) {
2039#ifdef DEBUG
2040 int original_height = frame_->height();
2041#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002042 Comment cmnt(masm_, "[ IfStatement");
2043 // Generate different code depending on which parts of the if statement
2044 // are present or not.
2045 bool has_then_stm = node->HasThenStatement();
2046 bool has_else_stm = node->HasElseStatement();
2047
2048 CodeForStatementPosition(node);
2049
2050 JumpTarget exit;
2051 if (has_then_stm && has_else_stm) {
2052 Comment cmnt(masm_, "[ IfThenElse");
2053 JumpTarget then;
2054 JumpTarget else_;
2055 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002056 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002057 if (frame_ != NULL) {
2058 Branch(false, &else_);
2059 }
2060 // then
2061 if (frame_ != NULL || then.is_linked()) {
2062 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002063 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002064 }
2065 if (frame_ != NULL) {
2066 exit.Jump();
2067 }
2068 // else
2069 if (else_.is_linked()) {
2070 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002071 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002072 }
2073
2074 } else if (has_then_stm) {
2075 Comment cmnt(masm_, "[ IfThen");
2076 ASSERT(!has_else_stm);
2077 JumpTarget then;
2078 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002079 LoadCondition(node->condition(), &then, &exit, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002080 if (frame_ != NULL) {
2081 Branch(false, &exit);
2082 }
2083 // then
2084 if (frame_ != NULL || then.is_linked()) {
2085 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002086 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002087 }
2088
2089 } else if (has_else_stm) {
2090 Comment cmnt(masm_, "[ IfElse");
2091 ASSERT(!has_then_stm);
2092 JumpTarget else_;
2093 // if (!cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002094 LoadCondition(node->condition(), &exit, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002095 if (frame_ != NULL) {
2096 Branch(true, &exit);
2097 }
2098 // else
2099 if (frame_ != NULL || else_.is_linked()) {
2100 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002101 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002102 }
2103
2104 } else {
2105 Comment cmnt(masm_, "[ If");
2106 ASSERT(!has_then_stm && !has_else_stm);
2107 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002108 LoadCondition(node->condition(), &exit, &exit, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00002109 if (frame_ != NULL) {
2110 if (has_cc()) {
2111 cc_reg_ = al;
2112 } else {
2113 frame_->Drop();
2114 }
2115 }
2116 }
2117
2118 // end
2119 if (exit.is_linked()) {
2120 exit.Bind();
2121 }
2122 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2123}
2124
2125
2126void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002127 Comment cmnt(masm_, "[ ContinueStatement");
2128 CodeForStatementPosition(node);
2129 node->target()->continue_target()->Jump();
2130}
2131
2132
2133void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002134 Comment cmnt(masm_, "[ BreakStatement");
2135 CodeForStatementPosition(node);
2136 node->target()->break_target()->Jump();
2137}
2138
2139
2140void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002141 Comment cmnt(masm_, "[ ReturnStatement");
2142
2143 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002144 Load(node->expression());
Iain Merrick75681382010-08-19 15:07:18 +01002145 frame_->PopToR0();
2146 frame_->PrepareForReturn();
Steve Blocka7e24c12009-10-30 11:49:00 +00002147 if (function_return_is_shadowed_) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002148 function_return_.Jump();
2149 } else {
2150 // Pop the result from the frame and prepare the frame for
2151 // returning, thus making it easier to merge.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002152 if (function_return_.is_bound()) {
2153 // If the function return label is already bound we reuse the
2154 // code by jumping to the return site.
2155 function_return_.Jump();
2156 } else {
2157 function_return_.Bind();
2158 GenerateReturnSequence();
2159 }
2160 }
2161}
Steve Blocka7e24c12009-10-30 11:49:00 +00002162
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002163
2164void CodeGenerator::GenerateReturnSequence() {
2165 if (FLAG_trace) {
2166 // Push the return value on the stack as the parameter.
2167 // Runtime::TraceExit returns the parameter as it is.
2168 frame_->EmitPush(r0);
2169 frame_->CallRuntime(Runtime::kTraceExit, 1);
2170 }
2171
2172#ifdef DEBUG
2173 // Add a label for checking the size of the code used for returning.
2174 Label check_exit_codesize;
2175 masm_->bind(&check_exit_codesize);
2176#endif
2177 // Make sure that the constant pool is not emitted inside of the return
2178 // sequence.
2179 { Assembler::BlockConstPoolScope block_const_pool(masm_);
2180 // Tear down the frame which will restore the caller's frame pointer and
2181 // the link register.
2182 frame_->Exit();
2183
2184 // Here we use masm_-> instead of the __ macro to avoid the code coverage
2185 // tool from instrumenting as we rely on the code size here.
2186 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
2187 masm_->add(sp, sp, Operand(sp_delta));
2188 masm_->Jump(lr);
2189 DeleteFrame();
2190
2191#ifdef DEBUG
2192 // Check that the size of the code used for returning matches what is
2193 // expected by the debugger. If the sp_delta above cannot be encoded in
2194 // the add instruction, the add will generate two instructions.
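  // For example (illustrative): a small delta such as 8 is a valid ARM
  // immediate and the add is one instruction, while a delta like 0x1004
  // cannot be encoded as a rotated 8-bit immediate and costs two.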
2195 int return_sequence_length =
2196 masm_->InstructionsGeneratedSince(&check_exit_codesize);
2197 CHECK(return_sequence_length ==
2198 Assembler::kJSReturnSequenceInstructions ||
2199 return_sequence_length ==
2200 Assembler::kJSReturnSequenceInstructions + 1);
2201#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002202 }
2203}
2204
2205
2206void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
2207#ifdef DEBUG
2208 int original_height = frame_->height();
2209#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002210 Comment cmnt(masm_, "[ WithEnterStatement");
2211 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002212 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002213 if (node->is_catch_block()) {
2214 frame_->CallRuntime(Runtime::kPushCatchContext, 1);
2215 } else {
2216 frame_->CallRuntime(Runtime::kPushContext, 1);
2217 }
2218#ifdef DEBUG
2219 JumpTarget verified_true;
Steve Block6ded16b2010-05-10 14:33:55 +01002220 __ cmp(r0, cp);
Steve Blocka7e24c12009-10-30 11:49:00 +00002221 verified_true.Branch(eq);
2222 __ stop("PushContext: r0 is expected to be the same as cp");
2223 verified_true.Bind();
2224#endif
2225 // Update context local.
2226 __ str(cp, frame_->Context());
2227 ASSERT(frame_->height() == original_height);
2228}
2229
2230
2231void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
2232#ifdef DEBUG
2233 int original_height = frame_->height();
2234#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002235 Comment cmnt(masm_, "[ WithExitStatement");
2236 CodeForStatementPosition(node);
2237 // Pop context.
2238 __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
2239 // Update context local.
2240 __ str(cp, frame_->Context());
2241 ASSERT(frame_->height() == original_height);
2242}
2243
2244
2245void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
2246#ifdef DEBUG
2247 int original_height = frame_->height();
2248#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002249 Comment cmnt(masm_, "[ SwitchStatement");
2250 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002251 node->break_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002252
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002253 Load(node->tag());
Steve Blocka7e24c12009-10-30 11:49:00 +00002254
2255 JumpTarget next_test;
2256 JumpTarget fall_through;
2257 JumpTarget default_entry;
2258 JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
2259 ZoneList<CaseClause*>* cases = node->cases();
2260 int length = cases->length();
2261 CaseClause* default_clause = NULL;
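  // For example (illustrative): in
  //
  //   switch (x) { case 0: a(); default: b(); case 1: c(); }
  //
  // the default clause is compiled last; case 0 falls into it through
  // default_entry, and control falling out of it resumes at case 1's body
  // through default_exit.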
2262
2263 for (int i = 0; i < length; i++) {
2264 CaseClause* clause = cases->at(i);
2265 if (clause->is_default()) {
2266 // Remember the default clause and compile it at the end.
2267 default_clause = clause;
2268 continue;
2269 }
2270
2271 Comment cmnt(masm_, "[ Case clause");
2272 // Compile the test.
2273 next_test.Bind();
2274 next_test.Unuse();
2275 // Duplicate TOS.
Steve Block8defd9f2010-07-08 12:39:36 +01002276 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00002277 Comparison(eq, NULL, clause->label(), true);
2278 Branch(false, &next_test);
2279
2280 // Before entering the body from the test, remove the switch value from
2281 // the stack.
2282 frame_->Drop();
2283
2284 // Label the body so that fall through is enabled.
2285 if (i > 0 && cases->at(i - 1)->is_default()) {
2286 default_exit.Bind();
2287 } else {
2288 fall_through.Bind();
2289 fall_through.Unuse();
2290 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002291 VisitStatements(clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002292
2293 // If control flow can fall through from the body, jump to the next body
2294 // or the end of the statement.
2295 if (frame_ != NULL) {
2296 if (i < length - 1 && cases->at(i + 1)->is_default()) {
2297 default_entry.Jump();
2298 } else {
2299 fall_through.Jump();
2300 }
2301 }
2302 }
2303
2304 // The final "test" removes the switch value.
2305 next_test.Bind();
2306 frame_->Drop();
2307
2308 // If there is a default clause, compile it.
2309 if (default_clause != NULL) {
2310 Comment cmnt(masm_, "[ Default clause");
2311 default_entry.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002312 VisitStatements(default_clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002313 // If control flow can fall out of the default and there is a case after
Steve Block8defd9f2010-07-08 12:39:36 +01002314 // it, jump to that case's body.
Steve Blocka7e24c12009-10-30 11:49:00 +00002315 if (frame_ != NULL && default_exit.is_bound()) {
2316 default_exit.Jump();
2317 }
2318 }
2319
2320 if (fall_through.is_linked()) {
2321 fall_through.Bind();
2322 }
2323
2324 if (node->break_target()->is_linked()) {
2325 node->break_target()->Bind();
2326 }
2327 node->break_target()->Unuse();
2328 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2329}
2330
2331
Steve Block3ce2e202009-11-05 08:53:23 +00002332void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002333#ifdef DEBUG
2334 int original_height = frame_->height();
2335#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002336 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002337 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002338 node->break_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002339 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Steve Block6ded16b2010-05-10 14:33:55 +01002340 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002341
Steve Block3ce2e202009-11-05 08:53:23 +00002342 // Label the top of the loop for the backward CFG edge. If the test
2343 // is always true we can use the continue target, and if the test is
2344 // always false there is no need.
2345 ConditionAnalysis info = AnalyzeCondition(node->cond());
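  // For example (illustrative): 'do { ... } while (true)' analyzes as
  // ALWAYS_TRUE, 'do { ... } while (false)' as ALWAYS_FALSE, and a
  // non-literal condition as DONT_KNOW.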
2346 switch (info) {
2347 case ALWAYS_TRUE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002348 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002349 node->continue_target()->Bind();
Steve Block3ce2e202009-11-05 08:53:23 +00002350 break;
2351 case ALWAYS_FALSE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002352 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002353 break;
2354 case DONT_KNOW:
Kristian Monsen25f61362010-05-21 11:50:48 +01002355 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002356 body.Bind();
2357 break;
2358 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002359
Steve Block3ce2e202009-11-05 08:53:23 +00002360 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002361 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00002362
Steve Blockd0582a62009-12-15 09:54:21 +00002363 // Compile the test.
Steve Block3ce2e202009-11-05 08:53:23 +00002364 switch (info) {
2365 case ALWAYS_TRUE:
2366 // If control can fall off the end of the body, jump back to the
2367 // top.
Steve Blocka7e24c12009-10-30 11:49:00 +00002368 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002369 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00002370 }
2371 break;
Steve Block3ce2e202009-11-05 08:53:23 +00002372 case ALWAYS_FALSE:
2373 // If we have a continue in the body, we only have to bind its
2374 // jump target.
2375 if (node->continue_target()->is_linked()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002376 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002377 }
Steve Block3ce2e202009-11-05 08:53:23 +00002378 break;
2379 case DONT_KNOW:
2380 // We have to compile the test expression if it can be reached by
2381 // control flow falling out of the body or via continue.
2382 if (node->continue_target()->is_linked()) {
2383 node->continue_target()->Bind();
2384 }
2385 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00002386 Comment cmnt(masm_, "[ DoWhileCondition");
2387 CodeForDoWhileConditionPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002388 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002389 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002390 // An invalid frame here indicates that control did not
2391 // fall out of the test expression.
2392 Branch(true, &body);
Steve Blocka7e24c12009-10-30 11:49:00 +00002393 }
2394 }
2395 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00002396 }
2397
2398 if (node->break_target()->is_linked()) {
2399 node->break_target()->Bind();
2400 }
Steve Block6ded16b2010-05-10 14:33:55 +01002401 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002402 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2403}
2404
2405
2406void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
2407#ifdef DEBUG
2408 int original_height = frame_->height();
2409#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002410 Comment cmnt(masm_, "[ WhileStatement");
2411 CodeForStatementPosition(node);
2412
2413 // If the test is never true and has no side effects there is no need
2414 // to compile the test or body.
2415 ConditionAnalysis info = AnalyzeCondition(node->cond());
2416 if (info == ALWAYS_FALSE) return;
2417
Kristian Monsen25f61362010-05-21 11:50:48 +01002418 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002419 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002420
2421 // Label the top of the loop with the continue target for the backward
2422 // CFG edge.
Kristian Monsen25f61362010-05-21 11:50:48 +01002423 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002424 node->continue_target()->Bind();
2425
2426 if (info == DONT_KNOW) {
Steve Block8defd9f2010-07-08 12:39:36 +01002427 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002428 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002429 if (has_valid_frame()) {
2430 // A NULL frame indicates that control did not fall out of the
2431 // test expression.
2432 Branch(false, node->break_target());
2433 }
2434 if (has_valid_frame() || body.is_linked()) {
2435 body.Bind();
2436 }
2437 }
2438
2439 if (has_valid_frame()) {
2440 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002441 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002442
2443 // If control flow can fall out of the body, jump back to the top.
2444 if (has_valid_frame()) {
2445 node->continue_target()->Jump();
2446 }
2447 }
2448 if (node->break_target()->is_linked()) {
2449 node->break_target()->Bind();
2450 }
Steve Block6ded16b2010-05-10 14:33:55 +01002451 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002452 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2453}
2454
2455
2456void CodeGenerator::VisitForStatement(ForStatement* node) {
2457#ifdef DEBUG
2458 int original_height = frame_->height();
2459#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002460 Comment cmnt(masm_, "[ ForStatement");
2461 CodeForStatementPosition(node);
2462 if (node->init() != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002463 Visit(node->init());
Steve Block3ce2e202009-11-05 08:53:23 +00002464 }
2465
2466 // If the test is never true there is no need to compile the test or
2467 // body.
2468 ConditionAnalysis info = AnalyzeCondition(node->cond());
2469 if (info == ALWAYS_FALSE) return;
2470
Kristian Monsen25f61362010-05-21 11:50:48 +01002471 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002472 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002473
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002474 // We know that the loop index is a smi if it is not modified in the
2475 // loop body and it is checked against a constant limit in the loop
2476 // condition. In this case, we reset the static type information of the
2477 // loop index to smi before compiling the body, the update expression, and
2478 // the bottom check of the loop condition.
2479 TypeInfoCodeGenState type_info_scope(this,
2480 node->is_fast_smi_loop() ?
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002481 node->loop_variable()->AsSlot() :
2482 NULL,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002483 TypeInfo::Smi());
2484
Steve Block3ce2e202009-11-05 08:53:23 +00002485 // If there is no update statement, label the top of the loop with the
2486 // continue target, otherwise with the loop target.
2487 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2488 if (node->next() == NULL) {
Kristian Monsen25f61362010-05-21 11:50:48 +01002489 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002490 node->continue_target()->Bind();
2491 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01002492 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002493 loop.Bind();
2494 }
2495
2496 // If the test is always true, there is no need to compile it.
2497 if (info == DONT_KNOW) {
2498 JumpTarget body;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002499 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002500 if (has_valid_frame()) {
2501 Branch(false, node->break_target());
2502 }
2503 if (has_valid_frame() || body.is_linked()) {
2504 body.Bind();
2505 }
2506 }
2507
2508 if (has_valid_frame()) {
2509 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002510 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002511
2512 if (node->next() == NULL) {
2513 // If there is no update statement and control flow can fall out
2514 // of the loop, jump directly to the continue label.
2515 if (has_valid_frame()) {
2516 node->continue_target()->Jump();
2517 }
2518 } else {
2519 // If there is an update statement and control flow can reach it
2520 // via falling out of the body of the loop or continuing, we
2521 // compile the update statement.
2522 if (node->continue_target()->is_linked()) {
2523 node->continue_target()->Bind();
2524 }
2525 if (has_valid_frame()) {
2526 // Record the source position of the statement: although this code
2527 // comes after the code for the body, it belongs to the loop
2528 // statement and not to the body.
2529 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002530 Visit(node->next());
Steve Block3ce2e202009-11-05 08:53:23 +00002531 loop.Jump();
2532 }
2533 }
2534 }
2535 if (node->break_target()->is_linked()) {
2536 node->break_target()->Bind();
2537 }
Steve Block6ded16b2010-05-10 14:33:55 +01002538 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002539 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2540}
2541
2542
2543void CodeGenerator::VisitForInStatement(ForInStatement* node) {
2544#ifdef DEBUG
2545 int original_height = frame_->height();
2546#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002547 Comment cmnt(masm_, "[ ForInStatement");
2548 CodeForStatementPosition(node);
2549
2550 JumpTarget primitive;
2551 JumpTarget jsobject;
2552 JumpTarget fixed_array;
2553 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
2554 JumpTarget end_del_check;
2555 JumpTarget exit;
2556
2557 // Get the object to enumerate over (converted to JSObject).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002558 Load(node->enumerable());
Steve Blocka7e24c12009-10-30 11:49:00 +00002559
Iain Merrick75681382010-08-19 15:07:18 +01002560 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002561 // Both SpiderMonkey and kjs ignore null and undefined in contrast
2562 // to the specification. 12.6.4 mandates a call to ToObject.
2563 frame_->EmitPop(r0);
2564 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2565 __ cmp(r0, ip);
2566 exit.Branch(eq);
2567 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2568 __ cmp(r0, ip);
2569 exit.Branch(eq);
2570
2571 // Stack layout in body:
2572 // [iteration counter (Smi)]
2573 // [length of array]
2574 // [FixedArray]
2575 // [Map or 0]
2576 // [Object]
2577
2578 // Check if enumerable is already a JSObject
2579 __ tst(r0, Operand(kSmiTagMask));
2580 primitive.Branch(eq);
2581 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2582 jsobject.Branch(hs);
2583
2584 primitive.Bind();
2585 frame_->EmitPush(r0);
Steve Blockd0582a62009-12-15 09:54:21 +00002586 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002587
2588 jsobject.Bind();
2589 // Get the set of properties (as a FixedArray or Map).
Steve Blockd0582a62009-12-15 09:54:21 +00002590 // r0: value to be iterated over
2591 frame_->EmitPush(r0); // Push the object being iterated over.
2592
2593 // Check cache validity in generated code. This is a fast case for
2594 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
2595 // guarantee cache validity, call the runtime system to check cache
2596 // validity or get the property names in a fixed array.
2597 JumpTarget call_runtime;
2598 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2599 JumpTarget check_prototype;
2600 JumpTarget use_cache;
2601 __ mov(r1, Operand(r0));
2602 loop.Bind();
2603 // Check that there are no elements.
2604 __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
2605 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2606 __ cmp(r2, r4);
2607 call_runtime.Branch(ne);
2608 // Check that instance descriptors are not empty so that we can
2609 // check for an enum cache. Leave the map in r3 for the subsequent
2610 // prototype load.
2611 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
2612 __ ldr(r2, FieldMemOperand(r3, Map::kInstanceDescriptorsOffset));
2613 __ LoadRoot(ip, Heap::kEmptyDescriptorArrayRootIndex);
2614 __ cmp(r2, ip);
2615 call_runtime.Branch(eq);
2616  // Check that there is an enum cache in the non-empty instance
2617 // descriptors. This is the case if the next enumeration index
2618 // field does not contain a smi.
2619 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumerationIndexOffset));
2620 __ tst(r2, Operand(kSmiTagMask));
2621 call_runtime.Branch(eq);
2622 // For all objects but the receiver, check that the cache is empty.
2623 // r4: empty fixed array root.
2624 __ cmp(r1, r0);
2625 check_prototype.Branch(eq);
2626 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
2627 __ cmp(r2, r4);
2628 call_runtime.Branch(ne);
2629 check_prototype.Bind();
2630 // Load the prototype from the map and loop if non-null.
2631 __ ldr(r1, FieldMemOperand(r3, Map::kPrototypeOffset));
2632 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2633 __ cmp(r1, ip);
2634 loop.Branch(ne);
2635 // The enum cache is valid. Load the map of the object being
2636 // iterated over and use the cache for the iteration.
2637 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2638 use_cache.Jump();
2639
2640 call_runtime.Bind();
2641 // Call the runtime to get the property names for the object.
2642 frame_->EmitPush(r0); // push the object (slot 4) for the runtime call
Steve Blocka7e24c12009-10-30 11:49:00 +00002643 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
2644
Steve Blockd0582a62009-12-15 09:54:21 +00002645 // If we got a map from the runtime call, we can do a fast
2646 // modification check. Otherwise, we got a fixed array, and we have
2647 // to do a slow check.
2648 // r0: map or fixed array (result from call to
2649 // Runtime::kGetPropertyNamesFast)
Steve Blocka7e24c12009-10-30 11:49:00 +00002650 __ mov(r2, Operand(r0));
2651 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
2652 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
2653 __ cmp(r1, ip);
2654 fixed_array.Branch(ne);
2655
Steve Blockd0582a62009-12-15 09:54:21 +00002656 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002657  // Get the enum cache.
Steve Blockd0582a62009-12-15 09:54:21 +00002658 // r0: map (either the result from a call to
2659 // Runtime::kGetPropertyNamesFast or has been fetched directly from
2660 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00002661 __ mov(r1, Operand(r0));
2662 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
2663 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
2664 __ ldr(r2,
2665 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
2666
2667 frame_->EmitPush(r0); // map
2668 frame_->EmitPush(r2); // enum cache bridge cache
2669 __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002670 frame_->EmitPush(r0);
2671 __ mov(r0, Operand(Smi::FromInt(0)));
2672 frame_->EmitPush(r0);
2673 entry.Jump();
2674
2675 fixed_array.Bind();
2676 __ mov(r1, Operand(Smi::FromInt(0)));
2677 frame_->EmitPush(r1); // insert 0 in place of Map
2678 frame_->EmitPush(r0);
2679
2680 // Push the length of the array and the initial index onto the stack.
2681 __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002682 frame_->EmitPush(r0);
2683 __ mov(r0, Operand(Smi::FromInt(0))); // init index
2684 frame_->EmitPush(r0);
2685
2686 // Condition.
2687 entry.Bind();
2688 // sp[0] : index
2689 // sp[1] : array/enum cache length
2690 // sp[2] : array or enum cache
2691 // sp[3] : 0 or map
2692 // sp[4] : enumerable
2693 // Grab the current frame's height for the break and continue
2694 // targets only after all the state is pushed on the frame.
Kristian Monsen25f61362010-05-21 11:50:48 +01002695 node->break_target()->SetExpectedHeight();
2696 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002697
Kristian Monsen25f61362010-05-21 11:50:48 +01002698 // Load the current count to r0, load the length to r1.
Leon Clarkef7060e22010-06-03 12:02:55 +01002699 __ Ldrd(r0, r1, frame_->ElementAt(0));
Steve Block6ded16b2010-05-10 14:33:55 +01002700 __ cmp(r0, r1); // compare to the array length
Steve Blocka7e24c12009-10-30 11:49:00 +00002701 node->break_target()->Branch(hs);
2702
Steve Blocka7e24c12009-10-30 11:49:00 +00002703 // Get the i'th entry of the array.
2704 __ ldr(r2, frame_->ElementAt(2));
2705 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
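  // r0 holds the index as a smi, i.e. already shifted left by
  // kSmiTagSize, so the additional shift of kPointerSizeLog2 - kSmiTagSize
  // in the load below turns it into the byte offset of the entry.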
2706 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
2707
2708 // Get Map or 0.
2709 __ ldr(r2, frame_->ElementAt(3));
2710 // Check if this (still) matches the map of the enumerable.
2711 // If not, we have to filter the key.
2712 __ ldr(r1, frame_->ElementAt(4));
2713 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
2714 __ cmp(r1, Operand(r2));
2715 end_del_check.Branch(eq);
2716
2717 // Convert the entry to a string (or null if it isn't a property anymore).
2718  __ ldr(r0, frame_->ElementAt(4)); // load the enumerable
2719 frame_->EmitPush(r0);
2720 frame_->EmitPush(r3); // push entry
Steve Blockd0582a62009-12-15 09:54:21 +00002721 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
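  // The builtin leaves its result in r0; copying it to r3 with SetCC
  // sets the condition flags, so the eq branch below is taken exactly
  // when the result is zero, i.e. when the key is gone.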
Iain Merrick75681382010-08-19 15:07:18 +01002722 __ mov(r3, Operand(r0), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00002723 // If the property has been removed while iterating, we just skip it.
Steve Blocka7e24c12009-10-30 11:49:00 +00002724 node->continue_target()->Branch(eq);
2725
2726 end_del_check.Bind();
2727 // Store the entry in the 'each' expression and take another spin in the
2728  // loop. r3: i'th entry of the enum cache (or string thereof).
2729 frame_->EmitPush(r3); // push entry
Iain Merrick75681382010-08-19 15:07:18 +01002730 { VirtualFrame::RegisterAllocationScope scope(this);
2731 Reference each(this, node->each());
Steve Blocka7e24c12009-10-30 11:49:00 +00002732 if (!each.is_illegal()) {
2733 if (each.size() > 0) {
Iain Merrick75681382010-08-19 15:07:18 +01002734 // Loading a reference may leave the frame in an unspilled state.
2735 frame_->SpillAll(); // Sync stack to memory.
2736 // Get the value (under the reference on the stack) from memory.
Steve Blocka7e24c12009-10-30 11:49:00 +00002737 __ ldr(r0, frame_->ElementAt(each.size()));
2738 frame_->EmitPush(r0);
Steve Block8defd9f2010-07-08 12:39:36 +01002739 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Iain Merrick75681382010-08-19 15:07:18 +01002740 frame_->Drop(2); // The result of the set and the extra pushed value.
Leon Clarked91b9f72010-01-27 17:25:45 +00002741 } else {
2742 // If the reference was to a slot we rely on the convenient property
Iain Merrick75681382010-08-19 15:07:18 +01002743  // that it doesn't matter whether a value (e.g., the entry pushed above) is
Leon Clarked91b9f72010-01-27 17:25:45 +00002744 // right on top of or right underneath a zero-sized reference.
Steve Block8defd9f2010-07-08 12:39:36 +01002745 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Iain Merrick75681382010-08-19 15:07:18 +01002746 frame_->Drop(1); // Drop the result of the set operation.
Steve Blocka7e24c12009-10-30 11:49:00 +00002747 }
2748 }
2749 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002750 // Body.
2751 CheckStack(); // TODO(1222600): ignore if body contains calls.
Iain Merrick75681382010-08-19 15:07:18 +01002752 { VirtualFrame::RegisterAllocationScope scope(this);
2753 Visit(node->body());
2754 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002755
2756 // Next. Reestablish a spilled frame in case we are coming here via
2757 // a continue in the body.
2758 node->continue_target()->Bind();
2759 frame_->SpillAll();
2760 frame_->EmitPop(r0);
2761 __ add(r0, r0, Operand(Smi::FromInt(1)));
2762 frame_->EmitPush(r0);
2763 entry.Jump();
2764
2765 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
2766 // any frame.
2767 node->break_target()->Bind();
2768 frame_->Drop(5);
2769
2770 // Exit.
2771 exit.Bind();
2772 node->continue_target()->Unuse();
2773 node->break_target()->Unuse();
2774 ASSERT(frame_->height() == original_height);
2775}
2776
2777
Steve Block3ce2e202009-11-05 08:53:23 +00002778void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002779#ifdef DEBUG
2780 int original_height = frame_->height();
2781#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002782 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002783 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002784 CodeForStatementPosition(node);
2785
2786 JumpTarget try_block;
2787 JumpTarget exit;
2788
2789 try_block.Call();
2790 // --- Catch block ---
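  // The code emitted here, just after the call site, is the catch
  // handler: when a throw unwinds to the TRY_CATCH handler pushed in
  // the try block below, execution resumes at this point with the
  // exception object in r0.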
2791 frame_->EmitPush(r0);
2792
2793 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00002794 Variable* catch_var = node->catch_var()->var();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002795 ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
2796 StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00002797
2798 // Remove the exception from the stack.
2799 frame_->Drop();
2800
Iain Merrick75681382010-08-19 15:07:18 +01002801 { VirtualFrame::RegisterAllocationScope scope(this);
2802 VisitStatements(node->catch_block()->statements());
2803 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002804 if (frame_ != NULL) {
2805 exit.Jump();
2806 }
2807
2808
2809 // --- Try block ---
2810 try_block.Bind();
2811
2812 frame_->PushTryHandler(TRY_CATCH_HANDLER);
2813 int handler_height = frame_->height();
2814
2815 // Shadow the labels for all escapes from the try block, including
2816 // returns. During shadowing, the original label is hidden as the
2817 // LabelShadow and operations on the original actually affect the
2818 // shadowing label.
2819 //
2820 // We should probably try to unify the escaping labels and the return
2821 // label.
2822 int nof_escapes = node->escaping_targets()->length();
2823 List<ShadowTarget*> shadows(1 + nof_escapes);
2824
2825 // Add the shadow target for the function return.
2826 static const int kReturnShadowIndex = 0;
2827 shadows.Add(new ShadowTarget(&function_return_));
2828 bool function_return_was_shadowed = function_return_is_shadowed_;
2829 function_return_is_shadowed_ = true;
2830 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2831
2832 // Add the remaining shadow targets.
2833 for (int i = 0; i < nof_escapes; i++) {
2834 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2835 }
2836
2837 // Generate code for the statements in the try block.
Iain Merrick75681382010-08-19 15:07:18 +01002838 { VirtualFrame::RegisterAllocationScope scope(this);
2839 VisitStatements(node->try_block()->statements());
2840 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002841
2842 // Stop the introduced shadowing and count the number of required unlinks.
2843 // After shadowing stops, the original labels are unshadowed and the
2844 // LabelShadows represent the formerly shadowing labels.
2845 bool has_unlinks = false;
2846 for (int i = 0; i < shadows.length(); i++) {
2847 shadows[i]->StopShadowing();
2848 has_unlinks = has_unlinks || shadows[i]->is_linked();
2849 }
2850 function_return_is_shadowed_ = function_return_was_shadowed;
2851
2852 // Get an external reference to the handler address.
2853 ExternalReference handler_address(Top::k_handler_address);
2854
2855 // If we can fall off the end of the try block, unlink from try chain.
2856 if (has_valid_frame()) {
2857 // The next handler address is on top of the frame. Unlink from
2858 // the handler list and drop the rest of this handler from the
2859 // frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002860 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Iain Merrick75681382010-08-19 15:07:18 +01002861 frame_->EmitPop(r1); // r0 can contain the return value.
Steve Blocka7e24c12009-10-30 11:49:00 +00002862 __ mov(r3, Operand(handler_address));
2863 __ str(r1, MemOperand(r3));
2864 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2865 if (has_unlinks) {
2866 exit.Jump();
2867 }
2868 }
2869
2870 // Generate unlink code for the (formerly) shadowing labels that have been
2871 // jumped to. Deallocate each shadow target.
2872 for (int i = 0; i < shadows.length(); i++) {
2873 if (shadows[i]->is_linked()) {
2874  // Unlink from the try chain.
2875 shadows[i]->Bind();
2876 // Because we can be jumping here (to spilled code) from unspilled
2877 // code, we need to reestablish a spilled frame at this block.
2878 frame_->SpillAll();
2879
2880 // Reload sp from the top handler, because some statements that we
2881  // break from (e.g., for...in) may have left stuff on the stack.
2882 __ mov(r3, Operand(handler_address));
2883 __ ldr(sp, MemOperand(r3));
2884 frame_->Forget(frame_->height() - handler_height);
2885
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002886 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Iain Merrick75681382010-08-19 15:07:18 +01002887 frame_->EmitPop(r1); // r0 can contain the return value.
Steve Blocka7e24c12009-10-30 11:49:00 +00002888 __ str(r1, MemOperand(r3));
2889 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2890
2891 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
2892 frame_->PrepareForReturn();
2893 }
2894 shadows[i]->other_target()->Jump();
2895 }
2896 }
2897
2898 exit.Bind();
2899 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2900}
2901
2902
Steve Block3ce2e202009-11-05 08:53:23 +00002903void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002904#ifdef DEBUG
2905 int original_height = frame_->height();
2906#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002907 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002908 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002909 CodeForStatementPosition(node);
2910
2911 // State: Used to keep track of reason for entering the finally
2912 // block. Should probably be extended to hold information for
2913 // break/continue from within the try block.
2914 enum { FALLING, THROWING, JUMPING };
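  // r2 carries one of these states into the finally block: FALLING when
  // control falls off the end of the try block, THROWING when an
  // exception was thrown (the exception object travels in r0), and
  // JUMPING + i when the i'th shadowing target (the function return or
  // an escaping break/continue) was taken.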
2915
2916 JumpTarget try_block;
2917 JumpTarget finally_block;
2918
2919 try_block.Call();
2920
2921 frame_->EmitPush(r0); // save exception object on the stack
2922 // In case of thrown exceptions, this is where we continue.
2923 __ mov(r2, Operand(Smi::FromInt(THROWING)));
2924 finally_block.Jump();
2925
2926 // --- Try block ---
2927 try_block.Bind();
2928
2929 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
2930 int handler_height = frame_->height();
2931
2932 // Shadow the labels for all escapes from the try block, including
2933 // returns. Shadowing hides the original label as the LabelShadow and
2934 // operations on the original actually affect the shadowing label.
2935 //
2936 // We should probably try to unify the escaping labels and the return
2937 // label.
2938 int nof_escapes = node->escaping_targets()->length();
2939 List<ShadowTarget*> shadows(1 + nof_escapes);
2940
2941 // Add the shadow target for the function return.
2942 static const int kReturnShadowIndex = 0;
2943 shadows.Add(new ShadowTarget(&function_return_));
2944 bool function_return_was_shadowed = function_return_is_shadowed_;
2945 function_return_is_shadowed_ = true;
2946 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2947
2948 // Add the remaining shadow targets.
2949 for (int i = 0; i < nof_escapes; i++) {
2950 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2951 }
2952
2953 // Generate code for the statements in the try block.
Iain Merrick75681382010-08-19 15:07:18 +01002954 { VirtualFrame::RegisterAllocationScope scope(this);
2955 VisitStatements(node->try_block()->statements());
2956 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002957
2958 // Stop the introduced shadowing and count the number of required unlinks.
2959 // After shadowing stops, the original labels are unshadowed and the
2960 // LabelShadows represent the formerly shadowing labels.
2961 int nof_unlinks = 0;
2962 for (int i = 0; i < shadows.length(); i++) {
2963 shadows[i]->StopShadowing();
2964 if (shadows[i]->is_linked()) nof_unlinks++;
2965 }
2966 function_return_is_shadowed_ = function_return_was_shadowed;
2967
2968 // Get an external reference to the handler address.
2969 ExternalReference handler_address(Top::k_handler_address);
2970
2971 // If we can fall off the end of the try block, unlink from the try
2972 // chain and set the state on the frame to FALLING.
2973 if (has_valid_frame()) {
2974 // The next handler address is on top of the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002975 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002976 frame_->EmitPop(r1);
2977 __ mov(r3, Operand(handler_address));
2978 __ str(r1, MemOperand(r3));
2979 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2980
2981 // Fake a top of stack value (unneeded when FALLING) and set the
2982 // state in r2, then jump around the unlink blocks if any.
2983 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2984 frame_->EmitPush(r0);
2985 __ mov(r2, Operand(Smi::FromInt(FALLING)));
2986 if (nof_unlinks > 0) {
2987 finally_block.Jump();
2988 }
2989 }
2990
2991 // Generate code to unlink and set the state for the (formerly)
2992 // shadowing targets that have been jumped to.
2993 for (int i = 0; i < shadows.length(); i++) {
2994 if (shadows[i]->is_linked()) {
2995 // If we have come from the shadowed return, the return value is
2996 // in (a non-refcounted reference to) r0. We must preserve it
2997 // until it is pushed.
2998 //
2999 // Because we can be jumping here (to spilled code) from
3000 // unspilled code, we need to reestablish a spilled frame at
3001 // this block.
3002 shadows[i]->Bind();
3003 frame_->SpillAll();
3004
3005 // Reload sp from the top handler, because some statements that
3006  // we break from (e.g., for...in) may have left stuff on the
3007 // stack.
3008 __ mov(r3, Operand(handler_address));
3009 __ ldr(sp, MemOperand(r3));
3010 frame_->Forget(frame_->height() - handler_height);
3011
3012 // Unlink this handler and drop it from the frame. The next
3013 // handler address is currently on top of the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003014 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003015 frame_->EmitPop(r1);
3016 __ str(r1, MemOperand(r3));
3017 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
3018
3019 if (i == kReturnShadowIndex) {
3020 // If this label shadowed the function return, materialize the
3021 // return value on the stack.
3022 frame_->EmitPush(r0);
3023 } else {
3024 // Fake TOS for targets that shadowed breaks and continues.
3025 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3026 frame_->EmitPush(r0);
3027 }
3028 __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
3029 if (--nof_unlinks > 0) {
3030 // If this is not the last unlink block, jump around the next.
3031 finally_block.Jump();
3032 }
3033 }
3034 }
3035
3036 // --- Finally block ---
3037 finally_block.Bind();
3038
3039 // Push the state on the stack.
3040 frame_->EmitPush(r2);
3041
3042 // We keep two elements on the stack - the (possibly faked) result
3043 // and the state - while evaluating the finally block.
3044 //
3045 // Generate code for the statements in the finally block.
Iain Merrick75681382010-08-19 15:07:18 +01003046 { VirtualFrame::RegisterAllocationScope scope(this);
3047 VisitStatements(node->finally_block()->statements());
3048 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003049
3050 if (has_valid_frame()) {
3051 // Restore state and return value or faked TOS.
3052 frame_->EmitPop(r2);
3053 frame_->EmitPop(r0);
3054 }
3055
3056 // Generate code to jump to the right destination for all used
3057 // formerly shadowing targets. Deallocate each shadow target.
3058 for (int i = 0; i < shadows.length(); i++) {
3059 if (has_valid_frame() && shadows[i]->is_bound()) {
3060 JumpTarget* original = shadows[i]->other_target();
3061 __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
3062 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
3063 JumpTarget skip;
3064 skip.Branch(ne);
3065 frame_->PrepareForReturn();
3066 original->Jump();
3067 skip.Bind();
3068 } else {
3069 original->Branch(eq);
3070 }
3071 }
3072 }
3073
3074 if (has_valid_frame()) {
3075 // Check if we need to rethrow the exception.
3076 JumpTarget exit;
3077 __ cmp(r2, Operand(Smi::FromInt(THROWING)));
3078 exit.Branch(ne);
3079
3080 // Rethrow exception.
3081 frame_->EmitPush(r0);
3082 frame_->CallRuntime(Runtime::kReThrow, 1);
3083
3084 // Done.
3085 exit.Bind();
3086 }
3087 ASSERT(!has_valid_frame() || frame_->height() == original_height);
3088}
3089
3090
3091void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
3092#ifdef DEBUG
3093 int original_height = frame_->height();
3094#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003095  Comment cmnt(masm_, "[ DebuggerStatement");
3096 CodeForStatementPosition(node);
3097#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00003098 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00003099#endif
3100 // Ignore the return value.
3101 ASSERT(frame_->height() == original_height);
3102}
3103
3104
Steve Block6ded16b2010-05-10 14:33:55 +01003105void CodeGenerator::InstantiateFunction(
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003106 Handle<SharedFunctionInfo> function_info,
3107 bool pretenure) {
Leon Clarkee46be812010-01-19 14:06:41 +00003108 // Use the fast case closure allocation code that allocates in new
3109  // space for nested functions that don't need literal cloning.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003110 if (scope()->is_function_scope() &&
3111 function_info->num_literals() == 0 &&
3112 !pretenure) {
Leon Clarkee46be812010-01-19 14:06:41 +00003113 FastNewClosureStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003114 frame_->EmitPush(Operand(function_info));
3115 frame_->SpillAll();
Leon Clarkee46be812010-01-19 14:06:41 +00003116 frame_->CallStub(&stub, 1);
3117 frame_->EmitPush(r0);
3118 } else {
3119 // Create a new closure.
3120 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003121 frame_->EmitPush(Operand(function_info));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003122 frame_->EmitPush(Operand(pretenure
3123 ? Factory::true_value()
3124 : Factory::false_value()));
3125 frame_->CallRuntime(Runtime::kNewClosure, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00003126 frame_->EmitPush(r0);
3127 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003128}
3129
3130
3131void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
3132#ifdef DEBUG
3133 int original_height = frame_->height();
3134#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003135 Comment cmnt(masm_, "[ FunctionLiteral");
3136
Steve Block6ded16b2010-05-10 14:33:55 +01003137 // Build the function info and instantiate it.
3138 Handle<SharedFunctionInfo> function_info =
Ben Murdochf87a2032010-10-22 12:50:53 +01003139 Compiler::BuildFunctionInfo(node, script());
3140 if (function_info.is_null()) {
3141 SetStackOverflow();
Steve Blocka7e24c12009-10-30 11:49:00 +00003142 ASSERT(frame_->height() == original_height);
3143 return;
3144 }
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003145 InstantiateFunction(function_info, node->pretenure());
Steve Block6ded16b2010-05-10 14:33:55 +01003146 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003147}
3148
3149
Steve Block6ded16b2010-05-10 14:33:55 +01003150void CodeGenerator::VisitSharedFunctionInfoLiteral(
3151 SharedFunctionInfoLiteral* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003152#ifdef DEBUG
3153 int original_height = frame_->height();
3154#endif
Steve Block6ded16b2010-05-10 14:33:55 +01003155 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003156 InstantiateFunction(node->shared_function_info(), false);
Steve Block6ded16b2010-05-10 14:33:55 +01003157 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003158}
3159
3160
3161void CodeGenerator::VisitConditional(Conditional* node) {
3162#ifdef DEBUG
3163 int original_height = frame_->height();
3164#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003165 Comment cmnt(masm_, "[ Conditional");
3166 JumpTarget then;
3167 JumpTarget else_;
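  // For 'cond ? a : b', LoadCondition splits control flow between the
  // two targets; each arm loads its expression and the arms join at a
  // common exit, so exactly one value ends up on the frame.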
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003168 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003169 if (has_valid_frame()) {
3170 Branch(false, &else_);
3171 }
3172 if (has_valid_frame() || then.is_linked()) {
3173 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003174 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003175 }
3176 if (else_.is_linked()) {
3177 JumpTarget exit;
3178 if (has_valid_frame()) exit.Jump();
3179 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003180 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003181 if (exit.is_linked()) exit.Bind();
3182 }
Steve Block6ded16b2010-05-10 14:33:55 +01003183 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003184}
3185
3186
3187void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003188 if (slot->type() == Slot::LOOKUP) {
3189 ASSERT(slot->var()->is_dynamic());
3190
Steve Block6ded16b2010-05-10 14:33:55 +01003191 // JumpTargets do not yet support merging frames so the frame must be
3192 // spilled when jumping to these targets.
Steve Blocka7e24c12009-10-30 11:49:00 +00003193 JumpTarget slow;
3194 JumpTarget done;
3195
Kristian Monsen25f61362010-05-21 11:50:48 +01003196 // Generate fast case for loading from slots that correspond to
3197 // local/global variables or arguments unless they are shadowed by
3198 // eval-introduced bindings.
3199 EmitDynamicLoadFromSlotFastCase(slot,
3200 typeof_state,
3201 &slow,
3202 &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00003203
3204 slow.Bind();
3205 frame_->EmitPush(cp);
Steve Block8defd9f2010-07-08 12:39:36 +01003206 frame_->EmitPush(Operand(slot->var()->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003207
3208 if (typeof_state == INSIDE_TYPEOF) {
3209 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3210 } else {
3211 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
3212 }
3213
3214 done.Bind();
3215 frame_->EmitPush(r0);
3216
3217 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003218 Register scratch = VirtualFrame::scratch0();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003219 TypeInfo info = type_info(slot);
3220 frame_->EmitPush(SlotOperand(slot, scratch), info);
Steve Block8defd9f2010-07-08 12:39:36 +01003221
Steve Blocka7e24c12009-10-30 11:49:00 +00003222 if (slot->var()->mode() == Variable::CONST) {
3223 // Const slots may contain 'the hole' value (the constant hasn't been
3224 // initialized yet) which needs to be converted into the 'undefined'
3225 // value.
3226 Comment cmnt(masm_, "[ Unhole const");
Steve Block8defd9f2010-07-08 12:39:36 +01003227 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003228 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003229 __ cmp(tos, ip);
3230 __ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);
3231 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00003232 }
3233 }
3234}
3235
3236
Steve Block6ded16b2010-05-10 14:33:55 +01003237void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
3238 TypeofState state) {
Steve Block8defd9f2010-07-08 12:39:36 +01003239 VirtualFrame::RegisterAllocationScope scope(this);
Steve Block6ded16b2010-05-10 14:33:55 +01003240 LoadFromSlot(slot, state);
3241
3242 // Bail out quickly if we're not using lazy arguments allocation.
3243 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
3244
3245 // ... or if the slot isn't a non-parameter arguments slot.
3246 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
3247
Steve Block8defd9f2010-07-08 12:39:36 +01003248  // Load the value at the top of the stack into a register, but leave it
Steve Block6ded16b2010-05-10 14:33:55 +01003249  // on the stack.
Steve Block8defd9f2010-07-08 12:39:36 +01003250 Register tos = frame_->Peek();
Steve Block6ded16b2010-05-10 14:33:55 +01003251
3252 // If the loaded value is the sentinel that indicates that we
3253 // haven't loaded the arguments object yet, we need to do it now.
3254 JumpTarget exit;
3255 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003256 __ cmp(tos, ip);
Steve Block6ded16b2010-05-10 14:33:55 +01003257 exit.Branch(ne);
3258 frame_->Drop();
3259 StoreArgumentsObject(false);
3260 exit.Bind();
3261}
3262
3263
Leon Clarkee46be812010-01-19 14:06:41 +00003264void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
3265 ASSERT(slot != NULL);
Steve Block8defd9f2010-07-08 12:39:36 +01003266 VirtualFrame::RegisterAllocationScope scope(this);
Leon Clarkee46be812010-01-19 14:06:41 +00003267 if (slot->type() == Slot::LOOKUP) {
3268 ASSERT(slot->var()->is_dynamic());
3269
3270 // For now, just do a runtime call.
3271 frame_->EmitPush(cp);
Steve Block8defd9f2010-07-08 12:39:36 +01003272 frame_->EmitPush(Operand(slot->var()->name()));
Leon Clarkee46be812010-01-19 14:06:41 +00003273
3274 if (init_state == CONST_INIT) {
3275 // Same as the case for a normal store, but ignores attribute
3276 // (e.g. READ_ONLY) of context slot so that we can initialize
3277 // const properties (introduced via eval("const foo = (some
3278 // expr);")). Also, uses the current function context instead of
3279 // the top context.
3280 //
3281 // Note that we must declare the foo upon entry of eval(), via a
3282 // context slot declaration, but we cannot initialize it at the
3283 // same time, because the const declaration may be at the end of
3284 // the eval code (sigh...) and the const variable may have been
3285 // used before (where its value is 'undefined'). Thus, we can only
3286 // do the initialization when we actually encounter the expression
3287 // and when the expression operands are defined and valid, and
3288 // thus we need the split into 2 operations: declaration of the
3289 // context slot followed by initialization.
3290 frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3291 } else {
3292 frame_->CallRuntime(Runtime::kStoreContextSlot, 3);
3293 }
3294 // Storing a variable must keep the (new) value on the expression
3295 // stack. This is necessary for compiling assignment expressions.
3296 frame_->EmitPush(r0);
3297
3298 } else {
3299 ASSERT(!slot->var()->is_dynamic());
Steve Block6ded16b2010-05-10 14:33:55 +01003300 Register scratch = VirtualFrame::scratch0();
Steve Block8defd9f2010-07-08 12:39:36 +01003301 Register scratch2 = VirtualFrame::scratch1();
Leon Clarkee46be812010-01-19 14:06:41 +00003302
Steve Block6ded16b2010-05-10 14:33:55 +01003303 // The frame must be spilled when branching to this target.
Leon Clarkee46be812010-01-19 14:06:41 +00003304 JumpTarget exit;
Steve Block6ded16b2010-05-10 14:33:55 +01003305
Leon Clarkee46be812010-01-19 14:06:41 +00003306 if (init_state == CONST_INIT) {
3307 ASSERT(slot->var()->mode() == Variable::CONST);
3308 // Only the first const initialization must be executed (the slot
3309 // still contains 'the hole' value). When the assignment is
3310 // executed, the code is identical to a normal store (see below).
3311 Comment cmnt(masm_, "[ Init const");
Steve Block6ded16b2010-05-10 14:33:55 +01003312 __ ldr(scratch, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003313 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01003314 __ cmp(scratch, ip);
Leon Clarkee46be812010-01-19 14:06:41 +00003315 exit.Branch(ne);
3316 }
3317
3318 // We must execute the store. Storing a variable must keep the
3319 // (new) value on the stack. This is necessary for compiling
3320 // assignment expressions.
3321 //
3322 // Note: We will reach here even with slot->var()->mode() ==
3323 // Variable::CONST because of const declarations which will
3324 // initialize consts to 'the hole' value and by doing so, end up
3325 // calling this code. r2 may be loaded with context; used below in
3326  // calling this code. scratch may be loaded with the context; it is used below in RecordWrite.
Steve Block6ded16b2010-05-10 14:33:55 +01003327 Register tos = frame_->Peek();
3328 __ str(tos, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003329 if (slot->type() == Slot::CONTEXT) {
3330 // Skip write barrier if the written value is a smi.
Steve Block6ded16b2010-05-10 14:33:55 +01003331 __ tst(tos, Operand(kSmiTagMask));
3332 // We don't use tos any more after here.
Leon Clarkee46be812010-01-19 14:06:41 +00003333 exit.Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01003334 // scratch is loaded with context when calling SlotOperand above.
Leon Clarkee46be812010-01-19 14:06:41 +00003335 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01003336 // We need an extra register. Until we have a way to do that in the
3337 // virtual frame we will cheat and ask for a free TOS register.
3338 Register scratch3 = frame_->GetTOSRegister();
3339 __ RecordWrite(scratch, Operand(offset), scratch2, scratch3);
Leon Clarkee46be812010-01-19 14:06:41 +00003340 }
3341 // If we definitely did not jump over the assignment, we do not need
3342 // to bind the exit label. Doing so can defeat peephole
3343 // optimization.
3344 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
3345 exit.Bind();
3346 }
3347 }
3348}
3349
3350
Steve Blocka7e24c12009-10-30 11:49:00 +00003351void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
3352 TypeofState typeof_state,
Steve Blocka7e24c12009-10-30 11:49:00 +00003353 JumpTarget* slow) {
3354 // Check that no extension objects have been created by calls to
3355 // eval from the current scope to the global scope.
Steve Block6ded16b2010-05-10 14:33:55 +01003356 Register tmp = frame_->scratch0();
3357 Register tmp2 = frame_->scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00003358 Register context = cp;
3359 Scope* s = scope();
3360 while (s != NULL) {
3361 if (s->num_heap_slots() > 0) {
3362 if (s->calls_eval()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003363 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003364 // Check that extension is NULL.
3365 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
3366 __ tst(tmp2, tmp2);
3367 slow->Branch(ne);
3368 }
3369 // Load next context in chain.
3370 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
3371 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3372 context = tmp;
3373 }
3374 // If no outer scope calls eval, we do not need to check more
3375 // context extensions.
3376 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
3377 s = s->outer_scope();
3378 }
3379
3380 if (s->is_eval_scope()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003381 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003382 Label next, fast;
Steve Block6ded16b2010-05-10 14:33:55 +01003383 __ Move(tmp, context);
Steve Blocka7e24c12009-10-30 11:49:00 +00003384 __ bind(&next);
3385 // Terminate at global context.
3386 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
3387 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
3388 __ cmp(tmp2, ip);
3389 __ b(eq, &fast);
3390 // Check that extension is NULL.
3391 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
3392 __ tst(tmp2, tmp2);
3393 slow->Branch(ne);
3394 // Load next context in chain.
3395 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
3396 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3397 __ b(&next);
3398 __ bind(&fast);
3399 }
3400
Steve Blocka7e24c12009-10-30 11:49:00 +00003401 // Load the global object.
3402 LoadGlobal();
Steve Block6ded16b2010-05-10 14:33:55 +01003403  // Set up the name register and call the load IC.
3404 frame_->CallLoadIC(slot->var()->name(),
3405 typeof_state == INSIDE_TYPEOF
3406 ? RelocInfo::CODE_TARGET
3407 : RelocInfo::CODE_TARGET_CONTEXT);
Steve Blocka7e24c12009-10-30 11:49:00 +00003408}
3409
3410
Kristian Monsen25f61362010-05-21 11:50:48 +01003411void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
3412 TypeofState typeof_state,
3413 JumpTarget* slow,
3414 JumpTarget* done) {
3415 // Generate fast-case code for variables that might be shadowed by
3416 // eval-introduced variables. Eval is used a lot without
3417 // introducing variables. In those cases, we do not want to
3418 // perform a runtime call for all variables in the scope
3419 // containing the eval.
3420 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
3421 LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
3422 frame_->SpillAll();
3423 done->Jump();
3424
3425 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
3426 frame_->SpillAll();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003427 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Kristian Monsen25f61362010-05-21 11:50:48 +01003428 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
3429 if (potential_slot != NULL) {
3430 // Generate fast case for locals that rewrite to slots.
3431 __ ldr(r0,
3432 ContextSlotOperandCheckExtensions(potential_slot,
3433 r1,
3434 r2,
3435 slow));
3436 if (potential_slot->var()->mode() == Variable::CONST) {
3437 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3438 __ cmp(r0, ip);
3439 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
3440 }
3441 done->Jump();
3442 } else if (rewrite != NULL) {
3443 // Generate fast case for argument loads.
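  // That is, the parameter is shadowed only by potential eval-introduced
  // bindings and has been rewritten to a keyed load of the form
  // arguments[i] with a constant smi index; handle that pattern here
  // without a runtime call.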
3444 Property* property = rewrite->AsProperty();
3445 if (property != NULL) {
3446 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
3447 Literal* key_literal = property->key()->AsLiteral();
3448 if (obj_proxy != NULL &&
3449 key_literal != NULL &&
3450 obj_proxy->IsArguments() &&
3451 key_literal->handle()->IsSmi()) {
3452 // Load arguments object if there are no eval-introduced
3453 // variables. Then load the argument from the arguments
3454 // object using keyed load.
3455 __ ldr(r0,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003456 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01003457 r1,
3458 r2,
3459 slow));
3460 frame_->EmitPush(r0);
3461 __ mov(r1, Operand(key_literal->handle()));
3462 frame_->EmitPush(r1);
3463 EmitKeyedLoad();
3464 done->Jump();
3465 }
3466 }
3467 }
3468 }
3469}
3470
3471
Steve Blocka7e24c12009-10-30 11:49:00 +00003472void CodeGenerator::VisitSlot(Slot* node) {
3473#ifdef DEBUG
3474 int original_height = frame_->height();
3475#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003476 Comment cmnt(masm_, "[ Slot");
Steve Block6ded16b2010-05-10 14:33:55 +01003477 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
3478 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003479}
3480
3481
3482void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
3483#ifdef DEBUG
3484 int original_height = frame_->height();
3485#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003486 Comment cmnt(masm_, "[ VariableProxy");
3487
3488 Variable* var = node->var();
3489 Expression* expr = var->rewrite();
3490 if (expr != NULL) {
3491 Visit(expr);
3492 } else {
3493 ASSERT(var->is_global());
3494 Reference ref(this, node);
Steve Block6ded16b2010-05-10 14:33:55 +01003495 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003496 }
Steve Block6ded16b2010-05-10 14:33:55 +01003497 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003498}
3499
3500
3501void CodeGenerator::VisitLiteral(Literal* node) {
3502#ifdef DEBUG
3503 int original_height = frame_->height();
3504#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003505 Comment cmnt(masm_, "[ Literal");
Steve Block6ded16b2010-05-10 14:33:55 +01003506 Register reg = frame_->GetTOSRegister();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003507 bool is_smi = node->handle()->IsSmi();
Steve Block6ded16b2010-05-10 14:33:55 +01003508 __ mov(reg, Operand(node->handle()));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003509 frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
Steve Block6ded16b2010-05-10 14:33:55 +01003510 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003511}
3512
3513
3514void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
3515#ifdef DEBUG
3516 int original_height = frame_->height();
3517#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003518  Comment cmnt(masm_, "[ RegExp Literal");
3519
Steve Block8defd9f2010-07-08 12:39:36 +01003520 Register tmp = VirtualFrame::scratch0();
3521 // Free up a TOS register that can be used to push the literal.
3522 Register literal = frame_->GetTOSRegister();
3523
Steve Blocka7e24c12009-10-30 11:49:00 +00003524 // Retrieve the literal array and check the allocated entry.
3525
3526 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003527 __ ldr(tmp, frame_->Function());
Steve Blocka7e24c12009-10-30 11:49:00 +00003528
3529 // Load the literals array of the function.
Steve Block8defd9f2010-07-08 12:39:36 +01003530 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003531
3532 // Load the literal at the ast saved index.
3533 int literal_offset =
3534 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01003535 __ ldr(literal, FieldMemOperand(tmp, literal_offset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003536
Ben Murdochbb769b22010-08-11 14:56:33 +01003537 JumpTarget materialized;
Steve Blocka7e24c12009-10-30 11:49:00 +00003538 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003539 __ cmp(literal, ip);
3540  // This branch locks the virtual frame at the materialized label to match
3541  // the one we have here, where the literal register is not on the stack and
3542 // nothing is spilled.
Ben Murdochbb769b22010-08-11 14:56:33 +01003543 materialized.Branch(ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00003544
Steve Block8defd9f2010-07-08 12:39:36 +01003545 // If the entry is undefined we call the runtime system to compute
Steve Blocka7e24c12009-10-30 11:49:00 +00003546 // the literal.
Steve Block8defd9f2010-07-08 12:39:36 +01003547 // literal array (0)
3548 frame_->EmitPush(tmp);
3549 // literal index (1)
3550 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3551 // RegExp pattern (2)
3552 frame_->EmitPush(Operand(node->pattern()));
3553 // RegExp flags (3)
3554 frame_->EmitPush(Operand(node->flags()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003555 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
Steve Block8defd9f2010-07-08 12:39:36 +01003556 __ Move(literal, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003557
Ben Murdochbb769b22010-08-11 14:56:33 +01003558 materialized.Bind();
3559
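  // Allocate a fresh object below and copy the boilerplate into it, so
  // that each evaluation of the literal yields a distinct JSRegExp (the
  // in-object fields, e.g. lastIndex, are copied along with it).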
Steve Block8defd9f2010-07-08 12:39:36 +01003560 frame_->EmitPush(literal);
Ben Murdochbb769b22010-08-11 14:56:33 +01003561 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3562 frame_->EmitPush(Operand(Smi::FromInt(size)));
3563 frame_->CallRuntime(Runtime::kAllocateInNewSpace, 1);
3564 // TODO(lrn): Use AllocateInNewSpace macro with fallback to runtime.
3565 // r0 is newly allocated space.
3566
3567 // Reuse literal variable with (possibly) a new register, still holding
3568 // the materialized boilerplate.
3569 literal = frame_->PopToRegister(r0);
3570
3571 __ CopyFields(r0, literal, tmp.bit(), size / kPointerSize);
3572
3573 // Push the clone.
3574 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003575 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003576}
3577
3578
Steve Blocka7e24c12009-10-30 11:49:00 +00003579void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
3580#ifdef DEBUG
3581 int original_height = frame_->height();
3582#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003583 Comment cmnt(masm_, "[ ObjectLiteral");
3584
Steve Block8defd9f2010-07-08 12:39:36 +01003585 Register literal = frame_->GetTOSRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003586 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003587 __ ldr(literal, frame_->Function());
Leon Clarkee46be812010-01-19 14:06:41 +00003588 // Literal array.
Steve Block8defd9f2010-07-08 12:39:36 +01003589 __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
3590 frame_->EmitPush(literal);
Leon Clarkee46be812010-01-19 14:06:41 +00003591 // Literal index.
Steve Block8defd9f2010-07-08 12:39:36 +01003592 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
Leon Clarkee46be812010-01-19 14:06:41 +00003593 // Constant properties.
Steve Block8defd9f2010-07-08 12:39:36 +01003594 frame_->EmitPush(Operand(node->constant_properties()));
Steve Block6ded16b2010-05-10 14:33:55 +01003595 // Should the object literal have fast elements?
Steve Block8defd9f2010-07-08 12:39:36 +01003596 frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
Leon Clarkee46be812010-01-19 14:06:41 +00003597 if (node->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01003598 frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00003599 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003600 frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003601 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003602 frame_->EmitPush(r0); // save the result
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003603
3604 // Mark all computed expressions that are bound to a key that
3605 // is shadowed by a later occurrence of the same key. For the
3606 // marked expressions, no store code is emitted.
3607 node->CalculateEmitStore();
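  // For example, in { x: 1, x: 2 } only the store for the later x is
  // emitted; emit_store() is false for the shadowed occurrence.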
3608
Steve Blocka7e24c12009-10-30 11:49:00 +00003609 for (int i = 0; i < node->properties()->length(); i++) {
Andrei Popescu402d9372010-02-26 13:31:12 +00003610 // At the start of each iteration, the top of stack contains
3611 // the newly created object literal.
Steve Blocka7e24c12009-10-30 11:49:00 +00003612 ObjectLiteral::Property* property = node->properties()->at(i);
3613 Literal* key = property->key();
3614 Expression* value = property->value();
3615 switch (property->kind()) {
3616 case ObjectLiteral::Property::CONSTANT:
3617 break;
3618 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
3619 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
3620 // else fall through
Andrei Popescu402d9372010-02-26 13:31:12 +00003621 case ObjectLiteral::Property::COMPUTED:
3622 if (key->handle()->IsSymbol()) {
3623 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003624 Load(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003625 if (property->emit_store()) {
3626 frame_->PopToR0();
3627 // Fetch the object literal.
3628 frame_->SpillAllButCopyTOSToR1();
3629 __ mov(r2, Operand(key->handle()));
3630 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
3631 } else {
3632 frame_->Drop();
3633 }
Andrei Popescu402d9372010-02-26 13:31:12 +00003634 break;
3635 }
3636 // else fall through
Steve Blocka7e24c12009-10-30 11:49:00 +00003637 case ObjectLiteral::Property::PROTOTYPE: {
Steve Block8defd9f2010-07-08 12:39:36 +01003638 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003639 Load(key);
3640 Load(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003641 if (property->emit_store()) {
3642 frame_->CallRuntime(Runtime::kSetProperty, 3);
3643 } else {
3644 frame_->Drop(3);
3645 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003646 break;
3647 }
3648 case ObjectLiteral::Property::SETTER: {
Steve Block8defd9f2010-07-08 12:39:36 +01003649 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003650 Load(key);
Steve Block8defd9f2010-07-08 12:39:36 +01003651 frame_->EmitPush(Operand(Smi::FromInt(1)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003652 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003653 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003654 break;
3655 }
3656 case ObjectLiteral::Property::GETTER: {
Steve Block8defd9f2010-07-08 12:39:36 +01003657 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003658 Load(key);
Steve Block8defd9f2010-07-08 12:39:36 +01003659 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003660 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003661 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003662 break;
3663 }
3664 }
3665 }
Steve Block6ded16b2010-05-10 14:33:55 +01003666 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003667}
3668
3669
Steve Blocka7e24c12009-10-30 11:49:00 +00003670void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
3671#ifdef DEBUG
3672 int original_height = frame_->height();
3673#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003674 Comment cmnt(masm_, "[ ArrayLiteral");
3675
Steve Block8defd9f2010-07-08 12:39:36 +01003676 Register tos = frame_->GetTOSRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003677 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003678 __ ldr(tos, frame_->Function());
Andrei Popescu402d9372010-02-26 13:31:12 +00003679 // Load the literals array of the function.
Steve Block8defd9f2010-07-08 12:39:36 +01003680 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
3681 frame_->EmitPush(tos);
3682 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3683 frame_->EmitPush(Operand(node->constant_elements()));
Andrei Popescu402d9372010-02-26 13:31:12 +00003684 int length = node->values()->length();
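  // Pick a cloning strategy: copy-on-write boilerplates go through the
  // COW stub, nested (depth > 1) or over-long literals go to the
  // runtime, and small shallow arrays use the fast clone stub.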
Iain Merrick75681382010-08-19 15:07:18 +01003685 if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
3686 FastCloneShallowArrayStub stub(
3687 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
3688 frame_->CallStub(&stub, 3);
3689 __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
3690 } else if (node->depth() > 1) {
Leon Clarkee46be812010-01-19 14:06:41 +00003691 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
Iain Merrick75681382010-08-19 15:07:18 +01003692 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00003693 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00003694 } else {
Iain Merrick75681382010-08-19 15:07:18 +01003695 FastCloneShallowArrayStub stub(
3696 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
Andrei Popescu402d9372010-02-26 13:31:12 +00003697 frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00003698 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003699 frame_->EmitPush(r0); // save the result
Leon Clarkee46be812010-01-19 14:06:41 +00003700 // r0: created object literal
Steve Blocka7e24c12009-10-30 11:49:00 +00003701
3702 // Generate code to set the elements in the array that are not
3703 // literals.
3704 for (int i = 0; i < node->values()->length(); i++) {
3705 Expression* value = node->values()->at(i);
3706
3707 // If value is a literal the property value is already set in the
3708 // boilerplate object.
3709 if (value->AsLiteral() != NULL) continue;
3710 // If value is a materialized literal the property value is already set
3711 // in the boilerplate object if it is simple.
3712 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
3713
3714 // The property must be set by generated code.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003715 Load(value);
Steve Block8defd9f2010-07-08 12:39:36 +01003716 frame_->PopToR0();
Steve Blocka7e24c12009-10-30 11:49:00 +00003717 // Fetch the object literal.
Steve Block8defd9f2010-07-08 12:39:36 +01003718 frame_->SpillAllButCopyTOSToR1();
3719
Steve Blocka7e24c12009-10-30 11:49:00 +00003720 // Get the elements array.
3721 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
3722
3723 // Write to the indexed properties array.
3724 int offset = i * kPointerSize + FixedArray::kHeaderSize;
3725 __ str(r0, FieldMemOperand(r1, offset));
3726
3727 // Update the write barrier for the array address.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003728 __ RecordWrite(r1, Operand(offset), r3, r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00003729 }
Steve Block6ded16b2010-05-10 14:33:55 +01003730 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003731}
3732
3733
3734void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
3735#ifdef DEBUG
3736 int original_height = frame_->height();
3737#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003738 // Call runtime routine to allocate the catch extension object and
3739 // assign the exception value to the catch variable.
3740 Comment cmnt(masm_, "[ CatchExtensionObject");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003741 Load(node->key());
3742 Load(node->value());
Steve Blocka7e24c12009-10-30 11:49:00 +00003743 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
3744 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003745 ASSERT_EQ(original_height + 1, frame_->height());
3746}
3747
3748
3749void CodeGenerator::EmitSlotAssignment(Assignment* node) {
3750#ifdef DEBUG
3751 int original_height = frame_->height();
3752#endif
3753 Comment cmnt(masm(), "[ Variable Assignment");
3754 Variable* var = node->target()->AsVariableProxy()->AsVariable();
3755 ASSERT(var != NULL);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003756 Slot* slot = var->AsSlot();
Steve Block6ded16b2010-05-10 14:33:55 +01003757 ASSERT(slot != NULL);
3758
3759 // Evaluate the right-hand side.
3760 if (node->is_compound()) {
3761 // For a compound assignment the right-hand side is a binary operation
3762 // between the current property value and the actual right-hand side.
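  // For example, 'x += 1' loads x, folds in the smi constant via
  // SmiOperation below, and then stores the result back into the slot.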
    LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    Load(node->value());
  }

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary.
  // If the receiver is trivial it is not placed on the stack at this point,
  // but loaded whenever actually needed.
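  // (A trivial receiver is typically a literal or a simple variable
  // reference, which can be re-loaded later without side effects.)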
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
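  // (Adding a fast property can involve copying the object's backing store
  // and a map transition, so N consecutive assignments can cost O(N^2);
  // in slow, dictionary mode each addition is cheap, and the object is
  // converted back to fast properties once at the end of the block.)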
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so this is never an assignment to a variable and there
    // must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      frame_->Dup();
    }
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare
  // for that add an extra copy of the receiver to the frame, so that it can
  // be converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame_->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else if (var != NULL) {
      LoadGlobal();
    } else {
      frame_->Dup();
    }
    EmitNamedLoad(name, var != NULL);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    // Load the receiver and swap with the value.
    Load(prop->obj());
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t0);
    frame_->EmitPush(t1);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  EmitNamedStore(name, is_contextual);
  frame_->EmitPush(r0);

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Register t0 = frame_->PopToRegister();
      Register t1 = frame_->PopToRegister(t0);
      frame_->EmitPush(t0);
      frame_->EmitPush(t1);
    }
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

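  // wb_info records what is statically known about the value that will be
  // stored (e.g. a literal is never a new-space object and a likely-smi
  // value may need no barrier at all), so that EmitKeyedStore below can
  // tailor the write-barrier code it emits.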
  WriteBarrierCharacter wb_info;

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare
  // for that add an extra copy of the receiver to the frame, so that it can
  // be converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block
  //
  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame_->Dup2();
    EmitKeyedLoad();
    frame_->EmitPush(r0);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
    wb_info = node->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
    wb_info = node->value()->AsLiteral() != NULL ?
        NEVER_NEWSPACE :
        (node->value()->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI);
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  EmitKeyedStore(prop->key()->type(), wb_info);
  frame_->EmitPush(r0);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t1);
    frame_->EmitPush(t0);
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitAssignment(Assignment* node) {
  VirtualFrame::RegisterAllocationScope scope(this);
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Assignment");

  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitThrow(Throw* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Throw");

  Load(node->exception());
  CodeForSourcePosition(node->position());
  frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitProperty(Property* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Property");

  { Reference property(this, node);
    property.GetValue();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCall(Call* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Standard function call.
  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.
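    // For example, for 'eval(s)' the frame set up below holds
    // [eval-function, receiver-slot, s]; the resolver returns the function
    // to call in r0 and the receiver in r1, which are written back over the
    // first two slots before the call.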

    // Prepare stack for call to resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    VirtualFrame::SpilledScope spilled_scope(frame_);

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code. If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
                                        NOT_INSIDE_TYPEOF,
                                        &slow);
      frame_->EmitPush(r0);
      if (arg_count > 0) {
        __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
        frame_->EmitPush(r1);
      } else {
        frame_->EmitPush(r2);
      }
      __ ldr(r1, frame_->Receiver());
      frame_->EmitPush(r1);

      frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 3);

      done.Jump();
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
    frame_->EmitPush(r1);
    if (arg_count > 0) {
      __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
      frame_->EmitPush(r1);
    } else {
      frame_->EmitPush(r2);
    }
    __ ldr(r1, frame_->Receiver());
    frame_->EmitPush(r1);

    // Resolve the call.
    frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind();

    // Touch up the stack with the right values for the function and the
    // receiver.
    __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ str(r1, MemOperand(sp, arg_count * kPointerSize));

    // Call the function.
    CodeForSourcePosition(node->position());

    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    frame_->CallStub(&call_function, arg_count + 1);

    __ ldr(cp, frame_->Context());
    // Remove the function from the stack.
    frame_->Drop();
    frame_->EmitPush(r0);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------
    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    VirtualFrame::SpilledScope spilled_scope(frame_);
    // Set up the name register and call the IC initialization code.
    __ mov(r2, Operand(var->name()));
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
    CodeForSourcePosition(node->position());
    frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
                           arg_count + 1);
    __ ldr(cp, frame_->Context());
    frame_->EmitPush(r0);

  } else if (var != NULL && var->AsSlot() != NULL &&
             var->AsSlot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
                                    NOT_INSIDE_TYPEOF,
                                    &slow,
                                    &done);

    slow.Bind();
    // Load the function.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // r0: slot value; r1: receiver

    // Load the receiver.
    frame_->EmitPush(r0);  // function
    frame_->EmitPush(r1);  // receiver

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind();
      frame_->EmitPush(r0);  // function
      LoadGlobalReceiver(VirtualFrame::scratch0());  // receiver
      call.Bind();
    }

    // Call the function. At this point, everything is spilled but the
    // function and receiver are in r0 and r1.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
    frame_->EmitPush(r0);

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
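        // This matches the common pattern
        //   function f() { return g.apply(this, arguments); }
        // and calls g directly, without ever materializing the arguments
        // object of f.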
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        Load(property->obj());  // Receiver.
        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
        }

        VirtualFrame::SpilledScope spilled_scope(frame_);
        // Set the name register and call the IC initialization code.
        __ mov(r2, Operand(name));
        InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
        Handle<Code> stub =
            StubCache::ComputeCallInitialize(arg_count, in_loop);
        CodeForSourcePosition(node->position());
        frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
        __ ldr(cp, frame_->Context());
        frame_->EmitPush(r0);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the receiver and name of the function.
      Load(property->obj());
      Load(property->key());

      if (property->is_synthetic()) {
        EmitKeyedLoad();
        // Put the function below the receiver.
        // Use the global receiver.
        frame_->EmitPush(r0);  // Function.
        LoadGlobalReceiver(VirtualFrame::scratch0());
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
        frame_->EmitPush(r0);
      } else {
        // Swap the name of the function and the receiver on the stack to
        // follow the calling convention for call ICs.
        Register key = frame_->PopToRegister();
        Register receiver = frame_->PopToRegister(key);
        frame_->EmitPush(key);
        frame_->EmitPush(receiver);

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
        }

        // Load the key into r2 and call the IC initialization code.
        InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
        Handle<Code> stub =
            StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
        CodeForSourcePosition(node->position());
        frame_->SpillAll();
        __ ldr(r2, frame_->ElementAt(arg_count + 1));
        frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
        frame_->Drop();  // Drop the key still on the stack.
        __ ldr(cp, frame_->Context());
        frame_->EmitPush(r0);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver(VirtualFrame::scratch0());

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCallNew(CallNew* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.
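  // For example, in 'new f(1, 2)' the value of f is pushed before the
  // arguments are evaluated, whereas the call IC used for 'f(1, 2)' pushes
  // the arguments first and resolves the callee at call time.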
4417
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004418 // Push constructor on the stack. If it's not a function it's used as
4419 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
4420 // ignored.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004421 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00004422
4423 // Push the arguments ("left-to-right") on the stack.
4424 ZoneList<Expression*>* args = node->arguments();
4425 int arg_count = args->length();
4426 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004427 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004428 }
4429
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004430 // Spill everything from here to simplify the implementation.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004431 VirtualFrame::SpilledScope spilled_scope(frame_);
4432
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004433 // Load the argument count into r0 and the function into r1 as per
4434 // calling convention.
Steve Blocka7e24c12009-10-30 11:49:00 +00004435 __ mov(r0, Operand(arg_count));
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004436 __ ldr(r1, frame_->ElementAt(arg_count));
Steve Blocka7e24c12009-10-30 11:49:00 +00004437
4438 // Call the construct call builtin that handles allocation and
4439 // constructor invocation.
4440 CodeForSourcePosition(node->position());
4441 Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
Leon Clarke4515c472010-02-03 11:58:03 +00004442 frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01004443 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004444
Steve Block6ded16b2010-05-10 14:33:55 +01004445 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004446}
4447
4448
4449void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004450 Register scratch = VirtualFrame::scratch0();
4451 JumpTarget null, function, leave, non_function_constructor;
Steve Blocka7e24c12009-10-30 11:49:00 +00004452
Iain Merrick75681382010-08-19 15:07:18 +01004453 // Load the object into register.
4454 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004455 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01004456 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00004457
4458 // If the object is a smi, we return null.
Iain Merrick75681382010-08-19 15:07:18 +01004459 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004460 null.Branch(eq);
4461
4462 // Check that the object is a JS object but take special care of JS
4463 // functions to make sure they have 'Function' as their class.
Iain Merrick75681382010-08-19 15:07:18 +01004464 __ CompareObjectType(tos, tos, scratch, FIRST_JS_OBJECT_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004465 null.Branch(lt);
4466
4467 // As long as JS_FUNCTION_TYPE is the last instance type and it is
4468 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
4469 // LAST_JS_OBJECT_TYPE.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01004470 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
4471 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
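  // Given these asserts, the 'lt' branch above already excluded everything
  // below FIRST_JS_OBJECT_TYPE, so the only instance type beyond
  // LAST_JS_OBJECT_TYPE is JS_FUNCTION_TYPE itself.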
  __ cmp(scratch, Operand(JS_FUNCTION_TYPE));
  function.Branch(eq);

  // Check if the constructor in the map is a function.
  __ ldr(tos, FieldMemOperand(tos, Map::kConstructorOffset));
  __ CompareObjectType(tos, scratch, scratch, JS_FUNCTION_TYPE);
  non_function_constructor.Branch(ne);

  // The tos register now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(tos,
         FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->EmitPush(tos);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  __ mov(tos, Operand(Factory::function_class_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  __ mov(tos, Operand(Factory::Object_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  __ LoadRoot(tos, Heap::kNullValueRootIndex);
  frame_->EmitPush(tos);

  // All done.
  leave.Bind();
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  Register scratch = VirtualFrame::scratch0();
  JumpTarget leave;

  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register tos = frame_->PopToRegister();  // tos contains object.
  // if (object->IsSmi()) return the object.
  __ tst(tos, Operand(kSmiTagMask));
  leave.Branch(eq);
  // It is a heap object - get map. If (!object->IsJSValue()) return the object.
  __ CompareObjectType(tos, scratch, scratch, JS_VALUE_TYPE);
  leave.Branch(ne);
  // Load the value.
  __ ldr(tos, FieldMemOperand(tos, JSValue::kValueOffset));
  leave.Bind();
  frame_->EmitPush(tos);
}


void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  JumpTarget leave;

  ASSERT(args->length() == 2);
  Load(args->at(0));  // Load the object.
  Load(args->at(1));  // Load the value.
  Register value = frame_->PopToRegister();
  Register object = frame_->PopToRegister(value);
  // if (object->IsSmi()) return object.
  __ tst(object, Operand(kSmiTagMask));
  leave.Branch(eq);
  // It is a heap object - get map. If (!object->IsJSValue()) return the object.
  __ CompareObjectType(object, scratch1, scratch1, JS_VALUE_TYPE);
  leave.Branch(ne);
  // Store the value.
  __ str(value, FieldMemOperand(object, JSValue::kValueOffset));
  // Update the write barrier.
  __ RecordWrite(object,
                 Operand(JSValue::kValueOffset - kHeapObjectTag),
                 scratch1,
                 scratch2);
  // Leave.
  leave.Bind();
  frame_->EmitPush(value);
}


void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register reg = frame_->PopToRegister();
  __ tst(reg, Operand(kSmiTagMask));
  cc_reg_ = eq;
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register reg = frame_->PopToRegister();
  __ tst(reg, Operand(kSmiTagMask | 0x80000000u));
  cc_reg_ = eq;
}


// Generates the Math.pow method.
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));

  if (!CpuFeatures::IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;
    Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get the base and exponent into registers.
    Register exponent = frame_->PopToRegister();
    Register base = frame_->PopToRegister(exponent);
    Register heap_number_map = no_reg;

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    __ BranchOnNotSmi(exponent, &exponent_nonsmi);
    __ BranchOnNotSmi(base, &base_nonsmi);

    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Exponent is a smi and base is a smi. Get the smi value into vfp
    // register d1.
    __ SmiToDoubleVFPRegister(base, d1, scratch1, s0);
    __ b(&powi);

    __ bind(&base_nonsmi);
    // Exponent is a smi and base is a non-smi. Get the double value from the
    // base into vfp register d1.
    __ ObjectToDoubleVFPRegister(base, d1,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    __ bind(&powi);

    // Load 1.0 into d0.
    __ vmov(d0, 1.0);

    // Get the absolute untagged value of the exponent and use that for the
    // calculation.
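    // (Smis are stored as value << kSmiTagSize, so the arithmetic shift
    // right below untags the exponent, and SetCC makes its sign available
    // to the conditional negate and conditional vmov that follow.)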
    __ mov(scratch1, Operand(exponent, ASR, kSmiTagSize), SetCC);
    // Negate if negative.
    __ rsb(scratch1, scratch1, Operand(0, RelocInfo::NONE), LeaveCC, mi);
    __ vmov(d2, d0, mi);  // 1.0 needed in d2 later if exponent is negative.

    // Run through all the bits in the exponent. The result is accumulated in
    // d0, and d1 holds base^(2^i) for the bit currently being processed.
    Label more_bits;
    __ bind(&more_bits);
    __ mov(scratch1, Operand(scratch1, LSR, 1), SetCC);
    __ vmul(d0, d0, d1, cs);  // Multiply by base^(2^i) if bit i is set.
    __ vmul(d1, d1, d1, ne);  // Don't bother calculating the next d1 if done.
    __ b(ne, &more_bits);
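    // For example, with exponent 5 (binary 101) the loop above computes:
    //   pass 1: carry = 1 -> d0 = base;      d1 = base^2
    //   pass 2: carry = 0 -> d0 unchanged;   d1 = base^4
    //   pass 3: carry = 1 -> d0 = base^5;    shifted value is 0, loop exits.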

    // If the exponent is positive we are done.
    __ cmp(exponent, Operand(0, RelocInfo::NONE));
    __ b(ge, &allocate_return);

    // If the exponent is negative the result is 1/result (d2 already holds
    // 1.0 in that case). However, if d0 has reached infinity this will not
    // provide the correct result, so call the runtime if that is the case.
    __ mov(scratch2, Operand(0x7FF00000));
    __ mov(scratch1, Operand(0, RelocInfo::NONE));
    __ vmov(d1, scratch1, scratch2);  // Load infinity into d1.
    __ vcmp(d0, d1);
    __ vmrs(pc);
    runtime.Branch(eq);  // d0 reached infinity.
    __ vdiv(d0, d2, d0);
    __ b(&allocate_return);

    __ bind(&exponent_nonsmi);
    // Special handling of raising to the power of -0.5 and 0.5. First check
    // that the value is a heap number and that the lower bits of its
    // mantissa (which are zero for both values) are indeed zero.
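    // As IEEE 754 doubles, 0.5 is 0x3FE0000000000000 and -0.5 is
    // 0xBFE0000000000000, so both have an all-zero mantissa word; checking
    // that word for zero and comparing the exponent word against the two
    // constants below identifies them exactly.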
    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
    __ ldr(scratch1, FieldMemOperand(exponent, HeapObject::kMapOffset));
    __ ldr(scratch2, FieldMemOperand(exponent, HeapNumber::kMantissaOffset));
    __ cmp(scratch1, heap_number_map);
    runtime.Branch(ne);
    __ tst(scratch2, scratch2);
    runtime.Branch(ne);

    // Load the higher bits (which contain the floating point exponent).
    __ ldr(scratch1, FieldMemOperand(exponent, HeapNumber::kExponentOffset));

    // Compare exponent with -0.5.
    __ cmp(scratch1, Operand(0xbfe00000));
    __ b(ne, &not_minus_half);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);

    // Load 1.0 into d2.
    __ vmov(d2, 1.0);

    // Calculate the reciprocal of the square root. 1/sqrt(x) = sqrt(1/x).
    __ vdiv(d0, d2, d0);
    __ vsqrt(d0, d0);

    __ b(&allocate_return);

    __ bind(&not_minus_half);
    // Compare exponent with 0.5.
    __ cmp(scratch1, Operand(0x3fe00000));
    runtime.Branch(ne);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);
    __ vsqrt(d0, d0);

    __ bind(&allocate_return);
    Register scratch3 = r5;
    __ AllocateHeapNumberWithValue(scratch3, d0, scratch1, scratch2,
                                   heap_number_map, runtime.entry_label());
    __ mov(base, scratch3);
    done.Jump();

    runtime.Bind();

    // Push back the arguments again for the runtime call.
    frame_->EmitPush(base);
    frame_->EmitPush(exponent);
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    __ Move(base, r0);

    done.Bind();
    frame_->EmitPush(base);
  }
}


// Generates the Math.sqrt method.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));

  if (!CpuFeatures::IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get the value from the frame.
    Register tos = frame_->PopToRegister();

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    Register heap_number_map = r6;
    Register new_heap_number = r5;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Get the double value from the heap number into vfp register d0.
    __ ObjectToDoubleVFPRegister(tos, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    // Calculate the square root of d0 and place the result in a heap number
    // object.
    __ vsqrt(d0, d0);
    __ AllocateHeapNumberWithValue(new_heap_number,
                                   d0,
                                   scratch1, scratch2,
                                   heap_number_map,
                                   runtime.entry_label());
    __ mov(tos, Operand(new_heap_number));
    done.Jump();

    runtime.Bind();
    // Push back the argument again for the runtime call.
    frame_->EmitPush(tos);
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    __ Move(tos, r0);

    done.Bind();
    frame_->EmitPush(tos);
  }
}


class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ LoadRoot(result_, Heap::kUndefinedValueRootIndex);
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ LoadRoot(result_, Heap::kNanValueRootIndex);
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};
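
// The deferred-code pattern above keeps the fast path in the main
// instruction stream: GenerateFast emits the common case inline, while
// Generate() emits the uncommon cases out of line, reached only when the
// fast path bails out (e.g. a non-flat string or an out-of-range index).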


// This generates code that performs a String.prototype.charCodeAt() call
// or returns undefined in order to trigger conversion.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  Register index = frame_->PopToRegister();
  Register object = frame_->PopToRegister(index);

  // We need two extra registers.
  Register scratch = VirtualFrame::scratch0();
  Register result = VirtualFrame::scratch1();

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object,
                                   index,
                                   scratch,
                                   result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Register result = frame_->GetTOSRegister();
  Register code = frame_->PopToRegister(result);

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code, result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ mov(result_, Operand(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ LoadRoot(result_, Heap::kEmptyStringRootIndex);
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  Register index = frame_->PopToRegister();
  Register object = frame_->PopToRegister(index);

  // We need three extra registers.
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  // Use r6 without notifying the virtual frame.
  Register result = r6;

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object,
                               index,
                               scratch1,
                               scratch2,
                               result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  JumpTarget answer;
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi. This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
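  // A smi has tag bit 0 and a heap pointer has tag bit 1: for a smi the
  // AND below yields 0 and the EOR then yields 1 (NE, so the branch is
  // taken and the answer is 'not an array'), while for a heap object the
  // AND yields 1 and the EOR yields 0 (EQ, falling through to the map
  // check).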
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004993 Register possible_array = frame_->PopToRegister();
4994 Register scratch = VirtualFrame::scratch0();
4995 __ and_(scratch, possible_array, Operand(kSmiTagMask));
4996 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00004997 answer.Branch(ne);
4998 // It is a heap object - get the map. Check if the object is a JS array.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004999 __ CompareObjectType(possible_array, scratch, scratch, JS_ARRAY_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00005000 answer.Bind();
5001 cc_reg_ = eq;
5002}
5003
5004
Andrei Popescu402d9372010-02-26 13:31:12 +00005005void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005006 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005007 Load(args->at(0));
Andrei Popescu402d9372010-02-26 13:31:12 +00005008 JumpTarget answer;
5009 // We need the CC bits to come out as not_equal in the case where the
5010 // object is a smi. This can't be done with the usual test opcode so
5011 // we use XOR to get the right CC bits.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005012 Register possible_regexp = frame_->PopToRegister();
5013 Register scratch = VirtualFrame::scratch0();
5014 __ and_(scratch, possible_regexp, Operand(kSmiTagMask));
5015 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Andrei Popescu402d9372010-02-26 13:31:12 +00005016 answer.Branch(ne);
5017 // It is a heap object - get the map. Check if the object is a regexp.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005018 __ CompareObjectType(possible_regexp, scratch, scratch, JS_REGEXP_TYPE);
Andrei Popescu402d9372010-02-26 13:31:12 +00005019 answer.Bind();
5020 cc_reg_ = eq;
5021}
5022
5023
Steve Blockd0582a62009-12-15 09:54:21 +00005024void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
5025 // This generates a fast version of:
5026 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
Steve Blockd0582a62009-12-15 09:54:21 +00005027 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005028 Load(args->at(0));
5029 Register possible_object = frame_->PopToRegister();
5030 __ tst(possible_object, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00005031 false_target()->Branch(eq);
5032
5033 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005034 __ cmp(possible_object, ip);
Steve Blockd0582a62009-12-15 09:54:21 +00005035 true_target()->Branch(eq);
5036
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005037 Register map_reg = VirtualFrame::scratch0();
5038 __ ldr(map_reg, FieldMemOperand(possible_object, HeapObject::kMapOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00005039 // Undetectable objects behave like undefined when tested with typeof.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005040 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kBitFieldOffset));
5041 __ tst(possible_object, Operand(1 << Map::kIsUndetectable));
Leon Clarkef7060e22010-06-03 12:02:55 +01005042 false_target()->Branch(ne);
Steve Blockd0582a62009-12-15 09:54:21 +00005043
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005044 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
5045 __ cmp(possible_object, Operand(FIRST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00005046 false_target()->Branch(lt);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005047 __ cmp(possible_object, Operand(LAST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00005048 cc_reg_ = le;
5049}
5050
5051
Ben Murdoch3bec4d22010-07-22 14:51:16 +01005052void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
5053 // This generates a fast version of:
5054 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
5055 // typeof(arg) == function).
5056 // It includes undetectable objects (as opposed to IsObject).
5057 ASSERT(args->length() == 1);
5058 Load(args->at(0));
5059 Register value = frame_->PopToRegister();
5060 __ tst(value, Operand(kSmiTagMask));
5061 false_target()->Branch(eq);
5062 // Check that this is an object.
5063 __ ldr(value, FieldMemOperand(value, HeapObject::kMapOffset));
5064 __ ldrb(value, FieldMemOperand(value, Map::kInstanceTypeOffset));
5065 __ cmp(value, Operand(FIRST_JS_OBJECT_TYPE));
5066 cc_reg_ = ge;
5067}
5068
5069
Iain Merrick75681382010-08-19 15:07:18 +01005070// Deferred code to check whether the String JavaScript wrapper object is safe
5071// to use its default valueOf. This code is called when the bit caching this
5072// information in the map has not yet been set, with the map for the object in
5073// the map_result_ register. On return the register map_result_ contains 1 for
5074// true and 0 for false.
5075class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
5076 public:
5077 DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
5078 Register map_result,
5079 Register scratch1,
5080 Register scratch2)
5081 : object_(object),
5082 map_result_(map_result),
5083 scratch1_(scratch1),
5084 scratch2_(scratch2) { }
5085
5086 virtual void Generate() {
5087 Label false_result;
5088
5089 // Check that map is loaded as expected.
5090 if (FLAG_debug_code) {
5091 __ ldr(ip, FieldMemOperand(object_, HeapObject::kMapOffset));
5092 __ cmp(map_result_, ip);
5093 __ Assert(eq, "Map not in expected register");
5094 }
5095
5096 // Check for fast case object. Generate false result for slow case object.
5097 __ ldr(scratch1_, FieldMemOperand(object_, JSObject::kPropertiesOffset));
5098 __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
5099 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
5100 __ cmp(scratch1_, ip);
5101 __ b(eq, &false_result);
5102
5103 // Look for valueOf symbol in the descriptor array, and indicate false if
5104 // found. The type is not checked, so if it is a transition it is a false
5105 // negative.
5106 __ ldr(map_result_,
5107 FieldMemOperand(map_result_, Map::kInstanceDescriptorsOffset));
5108 __ ldr(scratch2_, FieldMemOperand(map_result_, FixedArray::kLengthOffset));
5109 // map_result_: descriptor array
5110 // scratch2_: length of descriptor array
5111 // Calculate the end of the descriptor array.
5112 STATIC_ASSERT(kSmiTag == 0);
5113 STATIC_ASSERT(kSmiTagSize == 1);
5114 STATIC_ASSERT(kPointerSize == 4);
5115 __ add(scratch1_,
5116 map_result_,
5117 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5118 __ add(scratch1_,
5119 scratch1_,
5120 Operand(scratch2_, LSL, kPointerSizeLog2 - kSmiTagSize));
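    // The length is a smi (already shifted left by one), so shifting it by
    // kPointerSizeLog2 - kSmiTagSize more bits turns it into a byte offset;
    // scratch1_ now points just past the last descriptor array element.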
5121
5122 // Calculate location of the first key name.
5123 __ add(map_result_,
5124 map_result_,
5125 Operand(FixedArray::kHeaderSize - kHeapObjectTag +
5126 DescriptorArray::kFirstIndex * kPointerSize));
5127 // Loop through all the keys in the descriptor array. If one of these is the
5128 // symbol valueOf, the result is false.
5129 Label entry, loop;
5130 // The use of ip to store the valueOf symbol assumes that it is not otherwise
5131 // used in the loop below.
5132 __ mov(ip, Operand(Factory::value_of_symbol()));
5133 __ jmp(&entry);
5134 __ bind(&loop);
5135 __ ldr(scratch2_, MemOperand(map_result_, 0));
5136 __ cmp(scratch2_, ip);
5137 __ b(eq, &false_result);
5138 __ add(map_result_, map_result_, Operand(kPointerSize));
5139 __ bind(&entry);
5140 __ cmp(map_result_, Operand(scratch1_));
5141 __ b(ne, &loop);
5142
5143 // Reload map as register map_result_ was used as temporary above.
5144 __ ldr(map_result_, FieldMemOperand(object_, HeapObject::kMapOffset));
5145
5146 // If a valueOf property is not found on the object, check that its
5147 // prototype is the unmodified String prototype. If not, the result is false.
5148 __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kPrototypeOffset));
5149 __ tst(scratch1_, Operand(kSmiTagMask));
5150 __ b(eq, &false_result);
5151 __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
5152 __ ldr(scratch2_,
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005153 ContextOperand(cp, Context::GLOBAL_INDEX));
Iain Merrick75681382010-08-19 15:07:18 +01005154 __ ldr(scratch2_,
5155 FieldMemOperand(scratch2_, GlobalObject::kGlobalContextOffset));
5156 __ ldr(scratch2_,
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005157 ContextOperand(
Iain Merrick75681382010-08-19 15:07:18 +01005158 scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
5159 __ cmp(scratch1_, scratch2_);
5160 __ b(ne, &false_result);
5161
5162 // Set the bit in the map to indicate that it has been checked safe for
5163 // default valueOf, and set the result to true.
5164 __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
5165 __ orr(scratch1_,
5166 scratch1_,
5167 Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
5168 __ str(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
5169 __ mov(map_result_, Operand(1));
5170 __ jmp(exit_label());
5171 __ bind(&false_result);
5172 // Set false result.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005173 __ mov(map_result_, Operand(0, RelocInfo::NONE));
Iain Merrick75681382010-08-19 15:07:18 +01005174 }
5175
5176 private:
5177 Register object_;
5178 Register map_result_;
5179 Register scratch1_;
5180 Register scratch2_;
5181};
5182
5183
5184void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
5185 ZoneList<Expression*>* args) {
5186 ASSERT(args->length() == 1);
5187 Load(args->at(0));
5188 Register obj = frame_->PopToRegister(); // Pop the string wrapper.
5189 if (FLAG_debug_code) {
5190 __ AbortIfSmi(obj);
5191 }
5192
5193 // Check whether this map has already been checked to be safe for default
5194 // valueOf.
5195 Register map_result = VirtualFrame::scratch0();
5196 __ ldr(map_result, FieldMemOperand(obj, HeapObject::kMapOffset));
5197 __ ldrb(ip, FieldMemOperand(map_result, Map::kBitField2Offset));
5198 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
5199 true_target()->Branch(ne);
5200
5201 // We need an additional two scratch registers for the deferred code.
5202 Register scratch1 = VirtualFrame::scratch1();
5203 // Use r6 without notifying the virtual frame.
5204 Register scratch2 = r6;
5205
5206 DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
5207 new DeferredIsStringWrapperSafeForDefaultValueOf(
5208 obj, map_result, scratch1, scratch2);
5209 deferred->Branch(eq);
5210 deferred->BindExit();
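  // The deferred code leaves 1 (true) or 0 (false) in map_result, so testing
  // the register against itself turns that into ne for true.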
5211 __ tst(map_result, Operand(map_result));
5212 cc_reg_ = ne;
5213}
5214
5215
Steve Blockd0582a62009-12-15 09:54:21 +00005216void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
5217 // This generates a fast version of:
5218 // (%_ClassOf(arg) === 'Function')
Steve Blockd0582a62009-12-15 09:54:21 +00005219 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005220 Load(args->at(0));
5221 Register possible_function = frame_->PopToRegister();
5222 __ tst(possible_function, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00005223 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005224 Register map_reg = VirtualFrame::scratch0();
5225 Register scratch = VirtualFrame::scratch1();
5226 __ CompareObjectType(possible_function, map_reg, scratch, JS_FUNCTION_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00005227 cc_reg_ = eq;
5228}
5229
5230
Leon Clarked91b9f72010-01-27 17:25:45 +00005231void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
Leon Clarked91b9f72010-01-27 17:25:45 +00005232 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005233 Load(args->at(0));
5234 Register possible_undetectable = frame_->PopToRegister();
5235 __ tst(possible_undetectable, Operand(kSmiTagMask));
Leon Clarked91b9f72010-01-27 17:25:45 +00005236 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005237 Register scratch = VirtualFrame::scratch0();
5238 __ ldr(scratch,
5239 FieldMemOperand(possible_undetectable, HeapObject::kMapOffset));
5240 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5241 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
Leon Clarked91b9f72010-01-27 17:25:45 +00005242 cc_reg_ = ne;
5243}
5244
5245
Steve Blocka7e24c12009-10-30 11:49:00 +00005246void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005247 ASSERT(args->length() == 0);
5248
Leon Clarkef7060e22010-06-03 12:02:55 +01005249 Register scratch0 = VirtualFrame::scratch0();
5250 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00005251 // Get the frame pointer for the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005252 __ ldr(scratch0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005253
5254 // Skip the arguments adaptor frame if it exists.
Leon Clarkef7060e22010-06-03 12:02:55 +01005255 __ ldr(scratch1,
5256 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
5257 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5258 __ ldr(scratch0,
5259 MemOperand(scratch0, StandardFrameConstants::kCallerFPOffset), eq);
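  // The load above is predicated on eq, so scratch0 is only replaced by the
  // caller's caller FP when the calling frame really is an arguments adaptor.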
Steve Blocka7e24c12009-10-30 11:49:00 +00005260
5261 // Check the marker in the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005262 __ ldr(scratch1,
5263 MemOperand(scratch0, StandardFrameConstants::kMarkerOffset));
5264 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
Steve Blocka7e24c12009-10-30 11:49:00 +00005265 cc_reg_ = eq;
5266}
5267
5268
5269void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005270 ASSERT(args->length() == 0);
5271
Leon Clarkef7060e22010-06-03 12:02:55 +01005272 Register tos = frame_->GetTOSRegister();
5273 Register scratch0 = VirtualFrame::scratch0();
5274 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00005275
Steve Block6ded16b2010-05-10 14:33:55 +01005276 // Check if the calling frame is an arguments adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005277 __ ldr(scratch0,
5278 MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5279 __ ldr(scratch1,
5280 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
5281 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5282
5283 // Get the number of formal parameters.
5284 __ mov(tos, Operand(Smi::FromInt(scope()->num_parameters())), LeaveCC, ne);
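  // The mov above is predicated on ne: when the calling frame is not an
  // adaptor frame, the formal parameter count is the result. The eq-predicated
  // load below handles the adaptor case.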
Steve Block6ded16b2010-05-10 14:33:55 +01005285
5286 // Arguments adaptor case: Read the arguments length from the
5287 // adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005288 __ ldr(tos,
5289 MemOperand(scratch0, ArgumentsAdaptorFrameConstants::kLengthOffset),
5290 eq);
Steve Block6ded16b2010-05-10 14:33:55 +01005291
Leon Clarkef7060e22010-06-03 12:02:55 +01005292 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00005293}
5294
5295
Steve Block6ded16b2010-05-10 14:33:55 +01005296void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005297 ASSERT(args->length() == 1);
5298
5299 // Satisfy contract with ArgumentsAccessStub:
5300 // Load the key into r1 and the formal parameters count into r0.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005301 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01005302 frame_->PopToR1();
5303 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005304 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00005305
5306 // Call the shared stub to get to arguments[key].
5307 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
5308 frame_->CallStub(&stub, 0);
5309 frame_->EmitPush(r0);
5310}
5311
5312
Steve Block6ded16b2010-05-10 14:33:55 +01005313void CodeGenerator::GenerateRandomHeapNumber(
5314 ZoneList<Expression*>* args) {
5315 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005316 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01005317
5318 Label slow_allocate_heapnumber;
5319 Label heapnumber_allocated;
5320
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01005321 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
5322 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
Steve Block6ded16b2010-05-10 14:33:55 +01005323 __ jmp(&heapnumber_allocated);
5324
5325 __ bind(&slow_allocate_heapnumber);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01005326 // Allocate a heap number.
5327 __ CallRuntime(Runtime::kNumberAlloc, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01005328 __ mov(r4, Operand(r0));
5329
5330 __ bind(&heapnumber_allocated);
5331
5332 // Convert 32 random bits in r0 to 0.(32 random bits) in a double
5333 // by computing:
5334 // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
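  // The subtraction cancels the implicit leading 1 and the exponent, leaving
  // random_bits / 2^32, i.e. a value uniformly distributed in [0, 1).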
5335 if (CpuFeatures::IsSupported(VFP3)) {
5336 __ PrepareCallCFunction(0, r1);
5337 __ CallCFunction(ExternalReference::random_uint32_function(), 0);
5338
5339 CpuFeatures::Scope scope(VFP3);
5340 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
5341 // Create this constant using mov/orr to avoid PC relative load.
5342 __ mov(r1, Operand(0x41000000));
5343 __ orr(r1, r1, Operand(0x300000));
5344 // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
5345 __ vmov(d7, r0, r1);
5346 // Move 0x4130000000000000 to VFP.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005347 __ mov(r0, Operand(0, RelocInfo::NONE));
Steve Block6ded16b2010-05-10 14:33:55 +01005348 __ vmov(d8, r0, r1);
5349 // Subtract and store the result in the heap number.
5350 __ vsub(d7, d7, d8);
5351 __ sub(r0, r4, Operand(kHeapObjectTag));
5352 __ vstr(d7, r0, HeapNumber::kValueOffset);
5353 frame_->EmitPush(r4);
5354 } else {
5355 __ mov(r0, Operand(r4));
5356 __ PrepareCallCFunction(1, r1);
5357 __ CallCFunction(
5358 ExternalReference::fill_heap_number_with_random_function(), 1);
5359 frame_->EmitPush(r0);
5360 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005361}
5362
5363
Steve Blockd0582a62009-12-15 09:54:21 +00005364void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
5365 ASSERT_EQ(2, args->length());
5366
5367 Load(args->at(0));
5368 Load(args->at(1));
5369
Andrei Popescu31002712010-02-23 13:46:05 +00005370 StringAddStub stub(NO_STRING_ADD_FLAGS);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005371 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005372 frame_->CallStub(&stub, 2);
Steve Blockd0582a62009-12-15 09:54:21 +00005373 frame_->EmitPush(r0);
5374}
5375
5376
Leon Clarkee46be812010-01-19 14:06:41 +00005377void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
5378 ASSERT_EQ(3, args->length());
5379
5380 Load(args->at(0));
5381 Load(args->at(1));
5382 Load(args->at(2));
5383
Andrei Popescu31002712010-02-23 13:46:05 +00005384 SubStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005385 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005386 frame_->CallStub(&stub, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00005387 frame_->EmitPush(r0);
5388}
5389
5390
5391void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
5392 ASSERT_EQ(2, args->length());
5393
5394 Load(args->at(0));
5395 Load(args->at(1));
5396
Leon Clarked91b9f72010-01-27 17:25:45 +00005397 StringCompareStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005398 frame_->SpillAll();
Leon Clarked91b9f72010-01-27 17:25:45 +00005399 frame_->CallStub(&stub, 2);
Leon Clarkee46be812010-01-19 14:06:41 +00005400 frame_->EmitPush(r0);
5401}
5402
5403
5404void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
5405 ASSERT_EQ(4, args->length());
5406
5407 Load(args->at(0));
5408 Load(args->at(1));
5409 Load(args->at(2));
5410 Load(args->at(3));
Steve Block6ded16b2010-05-10 14:33:55 +01005411 RegExpExecStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005412 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01005413 frame_->CallStub(&stub, 4);
5414 frame_->EmitPush(r0);
5415}
Leon Clarkee46be812010-01-19 14:06:41 +00005416
Steve Block6ded16b2010-05-10 14:33:55 +01005417
5418void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
5419 // No stub. This code only occurs a few times in regexp.js.
5420 const int kMaxInlineLength = 100;
5421 ASSERT_EQ(3, args->length());
5422 Load(args->at(0)); // Size of array, smi.
5423 Load(args->at(1)); // "index" property value.
5424 Load(args->at(2)); // "input" property value.
5425 {
5426 VirtualFrame::SpilledScope spilled_scope(frame_);
5427 Label slowcase;
5428 Label done;
5429 __ ldr(r1, MemOperand(sp, kPointerSize * 2));
5430 STATIC_ASSERT(kSmiTag == 0);
5431 STATIC_ASSERT(kSmiTagSize == 1);
5432 __ tst(r1, Operand(kSmiTagMask));
5433 __ b(ne, &slowcase);
5434 __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength)));
5435 __ b(hi, &slowcase);
5436 // Smi-tagging is equivalent to multiplying by 2.
5437 // Allocate RegExpResult followed by FixedArray with the total size in r2.
5438 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
5439 // Elements: [Map][Length][..elements..]
5440 // Size of JSArray with two in-object properties and the header of a
5441 // FixedArray.
5442 int objects_size =
5443 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize;
5444 __ mov(r5, Operand(r1, LSR, kSmiTagSize + kSmiShiftSize));
5445 __ add(r2, r5, Operand(objects_size));
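    // r5 holds the untagged element count, so r2 is now the total allocation
    // size in words: the elements plus the JSArray and FixedArray headers.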
Kristian Monsen25f61362010-05-21 11:50:48 +01005446 __ AllocateInNewSpace(
5447 r2, // In: Size, in words.
5448 r0, // Out: Start of allocation (tagged).
5449 r3, // Scratch register.
5450 r4, // Scratch register.
5451 &slowcase,
5452 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
Steve Block6ded16b2010-05-10 14:33:55 +01005453 // r0: Start of allocated area, object-tagged.
5454 // r1: Number of elements in array, as smi.
5455 // r5: Number of elements, untagged.
5456
5457 // Set JSArray map to global.regexp_result_map().
5458 // Set empty properties FixedArray.
5459 // Set elements to point to FixedArray allocated right after the JSArray.
5460 // Interleave operations for better latency.
5461 __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
5462 __ add(r3, r0, Operand(JSRegExpResult::kSize));
5463 __ mov(r4, Operand(Factory::empty_fixed_array()));
5464 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
5465 __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
5466 __ ldr(r2, ContextOperand(r2, Context::REGEXP_RESULT_MAP_INDEX));
5467 __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
5468 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
5469
5470 // Set input, index and length fields from arguments.
5471 __ ldm(ia_w, sp, static_cast<RegList>(r2.bit() | r4.bit()));
5472 __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
5473 __ add(sp, sp, Operand(kPointerSize));
5474 __ str(r4, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
5475 __ str(r2, FieldMemOperand(r0, JSRegExpResult::kInputOffset));
5476
5477 // Fill out the elements FixedArray.
5478 // r0: JSArray, tagged.
5479 // r3: FixedArray, tagged.
5480 // r5: Number of elements in array, untagged.
5481
5482 // Set map.
5483 __ mov(r2, Operand(Factory::fixed_array_map()));
5484 __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
5485 // Set FixedArray length.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005486 __ mov(r6, Operand(r5, LSL, kSmiTagSize));
5487 __ str(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01005488 // Fill contents of fixed-array with the-hole.
5489 __ mov(r2, Operand(Factory::the_hole_value()));
5490 __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5491 // Fill fixed array elements with hole.
5492 // r0: JSArray, tagged.
5493 // r2: the hole.
5494 // r3: Start of elements in FixedArray.
5495 // r5: Number of elements to fill.
5496 Label loop;
5497 __ tst(r5, Operand(r5));
5498 __ bind(&loop);
5499 __ b(le, &done); // Jump if r5 is negative or zero.
5500 __ sub(r5, r5, Operand(1), SetCC);
5501 __ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2));
5502 __ jmp(&loop);
5503
5504 __ bind(&slowcase);
5505 __ CallRuntime(Runtime::kRegExpConstructResult, 3);
5506
5507 __ bind(&done);
5508 }
5509 frame_->Forget(3);
5510 frame_->EmitPush(r0);
5511}
5512
5513
5514class DeferredSearchCache: public DeferredCode {
5515 public:
5516 DeferredSearchCache(Register dst, Register cache, Register key)
5517 : dst_(dst), cache_(cache), key_(key) {
5518 set_comment("[ DeferredSearchCache");
5519 }
5520
5521 virtual void Generate();
5522
5523 private:
5524 Register dst_, cache_, key_;
5525};
5526
5527
5528void DeferredSearchCache::Generate() {
5529 __ Push(cache_, key_);
5530 __ CallRuntime(Runtime::kGetFromCache, 2);
Iain Merrick75681382010-08-19 15:07:18 +01005531 __ Move(dst_, r0);
Steve Block6ded16b2010-05-10 14:33:55 +01005532}
5533
5534
5535void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
5536 ASSERT_EQ(2, args->length());
5537
5538 ASSERT_NE(NULL, args->at(0)->AsLiteral());
5539 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
5540
5541 Handle<FixedArray> jsfunction_result_caches(
5542 Top::global_context()->jsfunction_result_caches());
5543 if (jsfunction_result_caches->length() <= cache_id) {
5544 __ Abort("Attempt to use undefined cache.");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005545 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005546 return;
5547 }
5548
5549 Load(args->at(1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005550
Iain Merrick75681382010-08-19 15:07:18 +01005551 frame_->PopToR1();
5552 frame_->SpillAll();
5553 Register key = r1; // Just popped to r1.
5554 Register result = r0; // Free, as frame has just been spilled.
5555 Register scratch1 = VirtualFrame::scratch0();
5556 Register scratch2 = VirtualFrame::scratch1();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005557
Iain Merrick75681382010-08-19 15:07:18 +01005558 __ ldr(scratch1, ContextOperand(cp, Context::GLOBAL_INDEX));
5559 __ ldr(scratch1,
5560 FieldMemOperand(scratch1, GlobalObject::kGlobalContextOffset));
5561 __ ldr(scratch1,
5562 ContextOperand(scratch1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
5563 __ ldr(scratch1,
5564 FieldMemOperand(scratch1, FixedArray::OffsetOfElementAt(cache_id)));
Steve Block6ded16b2010-05-10 14:33:55 +01005565
Iain Merrick75681382010-08-19 15:07:18 +01005566 DeferredSearchCache* deferred =
5567 new DeferredSearchCache(result, scratch1, key);
Steve Block6ded16b2010-05-10 14:33:55 +01005568
5569 const int kFingerOffset =
5570 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01005571 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Iain Merrick75681382010-08-19 15:07:18 +01005572 __ ldr(result, FieldMemOperand(scratch1, kFingerOffset));
5573 // result now holds finger offset as a smi.
5574 __ add(scratch2, scratch1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5575 // scratch2 now points to the start of fixed array elements.
5576 __ ldr(result,
5577 MemOperand(
5578 scratch2, result, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
5579 // Note side effect of PreIndex: scratch2 now points to the key of the pair.
5580 __ cmp(key, result);
Steve Block6ded16b2010-05-10 14:33:55 +01005581 deferred->Branch(ne);
5582
Iain Merrick75681382010-08-19 15:07:18 +01005583 __ ldr(result, MemOperand(scratch2, kPointerSize));
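  // Cache hit: entries are (key, value) pairs, so the value sits one word
  // after the key that scratch2 points at.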
Steve Block6ded16b2010-05-10 14:33:55 +01005584
5585 deferred->BindExit();
Iain Merrick75681382010-08-19 15:07:18 +01005586 frame_->EmitPush(result);
Leon Clarkee46be812010-01-19 14:06:41 +00005587}
5588
5589
Andrei Popescu402d9372010-02-26 13:31:12 +00005590void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
5591 ASSERT_EQ(args->length(), 1);
5592
5593 // Load the argument on the stack and jump to the runtime.
5594 Load(args->at(0));
5595
Steve Block6ded16b2010-05-10 14:33:55 +01005596 NumberToStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005597 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01005598 frame_->CallStub(&stub, 1);
5599 frame_->EmitPush(r0);
5600}
5601
5602
5603class DeferredSwapElements: public DeferredCode {
5604 public:
5605 DeferredSwapElements(Register object, Register index1, Register index2)
5606 : object_(object), index1_(index1), index2_(index2) {
5607 set_comment("[ DeferredSwapElements");
5608 }
5609
5610 virtual void Generate();
5611
5612 private:
5613 Register object_, index1_, index2_;
5614};
5615
5616
5617void DeferredSwapElements::Generate() {
5618 __ push(object_);
5619 __ push(index1_);
5620 __ push(index2_);
5621 __ CallRuntime(Runtime::kSwapElements, 3);
5622}
5623
5624
5625void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
5626 Comment cmnt(masm_, "[ GenerateSwapElements");
5627
5628 ASSERT_EQ(3, args->length());
5629
5630 Load(args->at(0));
5631 Load(args->at(1));
5632 Load(args->at(2));
5633
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005634 VirtualFrame::SpilledScope spilled_scope(frame_);
5635
Steve Block6ded16b2010-05-10 14:33:55 +01005636 Register index2 = r2;
5637 Register index1 = r1;
5638 Register object = r0;
5639 Register tmp1 = r3;
5640 Register tmp2 = r4;
5641
5642 frame_->EmitPop(index2);
5643 frame_->EmitPop(index1);
5644 frame_->EmitPop(object);
5645
5646 DeferredSwapElements* deferred =
5647 new DeferredSwapElements(object, index1, index2);
5648
5649 // Fetch the map and check if array is in fast case.
5650 // Check that object doesn't require security checks and
5651 // has no indexed interceptor.
5652 __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
5653 deferred->Branch(lt);
5654 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
5655 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
5656 deferred->Branch(nz);
5657
Iain Merrick75681382010-08-19 15:07:18 +01005658 // Check that the object's elements are in the fast case and writable.
Steve Block6ded16b2010-05-10 14:33:55 +01005659 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset));
5660 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset));
5661 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
5662 __ cmp(tmp2, ip);
5663 deferred->Branch(ne);
5664
5665 // Smi-tagging is equivalent to multiplying by 2.
5666 STATIC_ASSERT(kSmiTag == 0);
5667 STATIC_ASSERT(kSmiTagSize == 1);
5668
5669 // Check that both indices are smis.
5670 __ mov(tmp2, index1);
5671 __ orr(tmp2, tmp2, index2);
5672 __ tst(tmp2, Operand(kSmiTagMask));
5673 deferred->Branch(nz);
5674
Ben Murdochdb5a90a2011-01-06 18:27:03 +00005675 // Check that both indices are valid.
5676 __ ldr(tmp2, FieldMemOperand(object, JSArray::kLengthOffset));
5677 __ cmp(tmp2, index1);
5678 __ cmp(tmp2, index2, hi);
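  // The second cmp is predicated on hi, so it only runs when length > index1;
  // ls here therefore means at least one index is out of range.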
5679 deferred->Branch(ls);
5680
Steve Block6ded16b2010-05-10 14:33:55 +01005681 // Bring the offsets into the fixed array in tmp1 into index1 and
5682 // index2.
5683 __ mov(tmp2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5684 __ add(index1, tmp2, Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
5685 __ add(index2, tmp2, Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
5686
5687 // Swap elements.
5688 Register tmp3 = object;
5689 object = no_reg;
5690 __ ldr(tmp3, MemOperand(tmp1, index1));
5691 __ ldr(tmp2, MemOperand(tmp1, index2));
5692 __ str(tmp3, MemOperand(tmp1, index2));
5693 __ str(tmp2, MemOperand(tmp1, index1));
5694
5695 Label done;
5696 __ InNewSpace(tmp1, tmp2, eq, &done);
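  // Writes to objects in new space need no write barrier, so the record-write
  // stubs below are skipped in that case.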
5697 // Possible optimization: do a check that both values are Smis
5698 // (OR them together and test against the Smi mask).
5699
5700 __ mov(tmp2, tmp1);
5701 RecordWriteStub recordWrite1(tmp1, index1, tmp3);
5702 __ CallStub(&recordWrite1);
5703
5704 RecordWriteStub recordWrite2(tmp2, index2, tmp3);
5705 __ CallStub(&recordWrite2);
5706
5707 __ bind(&done);
5708
5709 deferred->BindExit();
5710 __ LoadRoot(tmp1, Heap::kUndefinedValueRootIndex);
5711 frame_->EmitPush(tmp1);
5712}
5713
5714
5715void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
5716 Comment cmnt(masm_, "[ GenerateCallFunction");
5717
5718 ASSERT(args->length() >= 2);
5719
5720 int n_args = args->length() - 2; // for receiver and function.
5721 Load(args->at(0)); // receiver
5722 for (int i = 0; i < n_args; i++) {
5723 Load(args->at(i + 1));
5724 }
5725 Load(args->at(n_args + 1)); // function
5726 frame_->CallJSFunction(n_args);
Andrei Popescu402d9372010-02-26 13:31:12 +00005727 frame_->EmitPush(r0);
5728}
5729
5730
5731void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
5732 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005733 Load(args->at(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005734 if (CpuFeatures::IsSupported(VFP3)) {
5735 TranscendentalCacheStub stub(TranscendentalCache::SIN);
5736 frame_->SpillAllButCopyTOSToR0();
5737 frame_->CallStub(&stub, 1);
5738 } else {
5739 frame_->CallRuntime(Runtime::kMath_sin, 1);
5740 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005741 frame_->EmitPush(r0);
5742}
5743
5744
5745void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
5746 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005747 Load(args->at(0));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005748 if (CpuFeatures::IsSupported(VFP3)) {
5749 TranscendentalCacheStub stub(TranscendentalCache::COS);
5750 frame_->SpillAllButCopyTOSToR0();
5751 frame_->CallStub(&stub, 1);
5752 } else {
5753 frame_->CallRuntime(Runtime::kMath_cos, 1);
5754 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005755 frame_->EmitPush(r0);
5756}
5757
5758
Steve Blocka7e24c12009-10-30 11:49:00 +00005759void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005760 ASSERT(args->length() == 2);
5761
5762 // Load the two objects into registers and perform the comparison.
Leon Clarkef7060e22010-06-03 12:02:55 +01005763 Load(args->at(0));
5764 Load(args->at(1));
5765 Register lhs = frame_->PopToRegister();
5766 Register rhs = frame_->PopToRegister(lhs);
5767 __ cmp(lhs, rhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00005768 cc_reg_ = eq;
5769}
5770
5771
Ben Murdochbb769b22010-08-11 14:56:33 +01005772void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
5773 ASSERT(args->length() == 2);
5774
5775 // Load the two objects into registers and perform the comparison.
5776 Load(args->at(0));
5777 Load(args->at(1));
5778 Register right = frame_->PopToRegister();
5779 Register left = frame_->PopToRegister(right);
5780 Register tmp = frame_->scratch0();
5781 Register tmp2 = frame_->scratch1();
5782
5783 // Jumps to done must have the eq flag set if the test is successful
5784 // and clear if the test has failed.
5785 Label done;
5786
5787 // Fail if either is a non-HeapObject.
5788 __ cmp(left, Operand(right));
5789 __ b(eq, &done);
5790 __ and_(tmp, left, Operand(right));
5791 __ eor(tmp, tmp, Operand(kSmiTagMask));
5792 __ tst(tmp, Operand(kSmiTagMask));
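  // Both tag bits survive the and_ only if both operands are heap objects; the
  // eor then clears the bit, so ne at this point means at least one smi.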
5793 __ b(ne, &done);
5794 __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
5795 __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
5796 __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
5797 __ b(ne, &done);
5798 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
5799 __ cmp(tmp, Operand(tmp2));
5800 __ b(ne, &done);
5801 __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
5802 __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
5803 __ cmp(tmp, tmp2);
5804 __ bind(&done);
5805 cc_reg_ = eq;
5806}
5807
5808
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005809void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
5810 ASSERT(args->length() == 1);
5811 Load(args->at(0));
5812 Register value = frame_->PopToRegister();
5813 Register tmp = frame_->scratch0();
5814 __ ldr(tmp, FieldMemOperand(value, String::kHashFieldOffset));
5815 __ tst(tmp, Operand(String::kContainsCachedArrayIndexMask));
5816 cc_reg_ = eq;
5817}
5818
5819
5820void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
5821 ASSERT(args->length() == 1);
5822 Load(args->at(0));
5823 Register value = frame_->PopToRegister();
5824
5825 __ ldr(value, FieldMemOperand(value, String::kHashFieldOffset));
5826 __ IndexFromHash(value, value);
5827 frame_->EmitPush(value);
5828}
5829
Ben Murdochbb769b22010-08-11 14:56:33 +01005830
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005831void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
5832 ASSERT(args->length() == 2);
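  // The fast path is not implemented here; the argument is dropped and
  // undefined is returned, which presumably makes the JavaScript caller fall
  // back to the generic join code.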
5833 Load(args->at(0));
5834 Register value = frame_->PopToRegister();
5835 __ LoadRoot(value, Heap::kUndefinedValueRootIndex);
5836 frame_->EmitPush(value);
5837}
5838
5839
Steve Blocka7e24c12009-10-30 11:49:00 +00005840void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
5841#ifdef DEBUG
5842 int original_height = frame_->height();
5843#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005844 if (CheckForInlineRuntimeCall(node)) {
5845 ASSERT((has_cc() && frame_->height() == original_height) ||
5846 (!has_cc() && frame_->height() == original_height + 1));
5847 return;
5848 }
5849
5850 ZoneList<Expression*>* args = node->arguments();
5851 Comment cmnt(masm_, "[ CallRuntime");
5852 Runtime::Function* function = node->function();
5853
5854 if (function == NULL) {
5855 // Prepare stack for calling JS runtime function.
Steve Blocka7e24c12009-10-30 11:49:00 +00005856 // Push the builtins object found in the current global object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005857 Register scratch = VirtualFrame::scratch0();
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005858 __ ldr(scratch, GlobalObjectOperand());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005859 Register builtins = frame_->GetTOSRegister();
5860 __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
5861 frame_->EmitPush(builtins);
Steve Blocka7e24c12009-10-30 11:49:00 +00005862 }
5863
5864 // Push the arguments ("left-to-right").
5865 int arg_count = args->length();
5866 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005867 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00005868 }
5869
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005870 VirtualFrame::SpilledScope spilled_scope(frame_);
5871
Steve Blocka7e24c12009-10-30 11:49:00 +00005872 if (function == NULL) {
5873 // Call the JS runtime function.
Andrei Popescu402d9372010-02-26 13:31:12 +00005874 __ mov(r2, Operand(node->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005875 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005876 Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
Steve Blocka7e24c12009-10-30 11:49:00 +00005877 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
5878 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00005879 frame_->EmitPush(r0);
5880 } else {
5881 // Call the C runtime function.
5882 frame_->CallRuntime(function, arg_count);
5883 frame_->EmitPush(r0);
5884 }
Steve Block6ded16b2010-05-10 14:33:55 +01005885 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005886}
5887
5888
5889void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
5890#ifdef DEBUG
5891 int original_height = frame_->height();
5892#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005893 Comment cmnt(masm_, "[ UnaryOperation");
5894
5895 Token::Value op = node->op();
5896
5897 if (op == Token::NOT) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005898 LoadCondition(node->expression(), false_target(), true_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00005899 // LoadCondition may (and usually does) leave a test and branch to
5900 // be emitted by the caller. In that case, negate the condition.
5901 if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);
5902
5903 } else if (op == Token::DELETE) {
5904 Property* property = node->expression()->AsProperty();
5905 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
5906 if (property != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005907 Load(property->obj());
5908 Load(property->key());
Steve Blockd0582a62009-12-15 09:54:21 +00005909 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005910 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005911
5912 } else if (variable != NULL) {
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005913 Slot* slot = variable->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00005914 if (variable->is_global()) {
5915 LoadGlobal();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005916 frame_->EmitPush(Operand(variable->name()));
Steve Blockd0582a62009-12-15 09:54:21 +00005917 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005918 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005919
5920 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
5921 // Look up the context holding the named variable.
5922 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005923 frame_->EmitPush(Operand(variable->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005924 frame_->CallRuntime(Runtime::kLookupContext, 2);
5925 // r0: context
5926 frame_->EmitPush(r0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005927 frame_->EmitPush(Operand(variable->name()));
Steve Blockd0582a62009-12-15 09:54:21 +00005928 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005929 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005930
5931 } else {
5932 // Default: The result of deleting a non-global variable that was not
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005933 // dynamically introduced is false.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005934 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005935 }
5936
5937 } else {
5938 // Default: Result of deleting expressions is true.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005939 Load(node->expression()); // may have side-effects
Steve Blocka7e24c12009-10-30 11:49:00 +00005940 frame_->Drop();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005941 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005942 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005943
5944 } else if (op == Token::TYPEOF) {
5945 // Special case for loading the typeof expression; see comment on
5946 // LoadTypeofExpression().
5947 LoadTypeofExpression(node->expression());
5948 frame_->CallRuntime(Runtime::kTypeof, 1);
5949 frame_->EmitPush(r0); // r0 has result
5950
5951 } else {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005952 bool can_overwrite = node->expression()->ResultOverwriteAllowed();
Leon Clarkeac952652010-07-15 11:15:24 +01005953 UnaryOverwriteMode overwrite =
5954 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
5955
5956 bool no_negative_zero = node->expression()->no_negative_zero();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005957 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00005958 switch (op) {
5959 case Token::NOT:
5960 case Token::DELETE:
5961 case Token::TYPEOF:
5962 UNREACHABLE(); // handled above
5963 break;
5964
5965 case Token::SUB: {
Steve Block8defd9f2010-07-08 12:39:36 +01005966 frame_->PopToR0();
Leon Clarkeac952652010-07-15 11:15:24 +01005967 GenericUnaryOpStub stub(
5968 Token::SUB,
5969 overwrite,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005970 NO_UNARY_FLAGS,
Leon Clarkeac952652010-07-15 11:15:24 +01005971 no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
Steve Blocka7e24c12009-10-30 11:49:00 +00005972 frame_->CallStub(&stub, 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005973 frame_->EmitPush(r0); // r0 has result
Steve Blocka7e24c12009-10-30 11:49:00 +00005974 break;
5975 }
5976
5977 case Token::BIT_NOT: {
Steve Block8defd9f2010-07-08 12:39:36 +01005978 Register tos = frame_->PopToRegister();
5979 JumpTarget not_smi_label;
Steve Blocka7e24c12009-10-30 11:49:00 +00005980 JumpTarget continue_label;
Steve Block8defd9f2010-07-08 12:39:36 +01005981 // Smi check.
5982 __ tst(tos, Operand(kSmiTagMask));
5983 not_smi_label.Branch(ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00005984
Steve Block8defd9f2010-07-08 12:39:36 +01005985 __ mvn(tos, Operand(tos));
5986 __ bic(tos, tos, Operand(kSmiTagMask)); // Bit-clear inverted smi-tag.
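        // The mvn also inverted the (zero) smi tag bit; clearing it again
        // leaves a valid smi holding the bitwise complement.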
5987 frame_->EmitPush(tos);
5988 // The fast case is the first to jump to the continue label, so it gets
5989 // to decide the virtual frame layout.
Steve Blocka7e24c12009-10-30 11:49:00 +00005990 continue_label.Jump();
Leon Clarke4515c472010-02-03 11:58:03 +00005991
Steve Block8defd9f2010-07-08 12:39:36 +01005992 not_smi_label.Bind();
5993 frame_->SpillAll();
5994 __ Move(r0, tos);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005995 GenericUnaryOpStub stub(Token::BIT_NOT,
5996 overwrite,
5997 NO_UNARY_SMI_CODE_IN_STUB);
Steve Block8defd9f2010-07-08 12:39:36 +01005998 frame_->CallStub(&stub, 0);
5999 frame_->EmitPush(r0);
6000
Steve Blocka7e24c12009-10-30 11:49:00 +00006001 continue_label.Bind();
6002 break;
6003 }
6004
6005 case Token::VOID:
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006006 frame_->Drop();
6007 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00006008 break;
6009
6010 case Token::ADD: {
Steve Block8defd9f2010-07-08 12:39:36 +01006011 Register tos = frame_->Peek();
Steve Blocka7e24c12009-10-30 11:49:00 +00006012 // Smi check.
6013 JumpTarget continue_label;
Steve Block8defd9f2010-07-08 12:39:36 +01006014 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006015 continue_label.Branch(eq);
Steve Block8defd9f2010-07-08 12:39:36 +01006016
Steve Blockd0582a62009-12-15 09:54:21 +00006017 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
Steve Block8defd9f2010-07-08 12:39:36 +01006018 frame_->EmitPush(r0);
6019
Steve Blocka7e24c12009-10-30 11:49:00 +00006020 continue_label.Bind();
6021 break;
6022 }
6023 default:
6024 UNREACHABLE();
6025 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006026 }
6027 ASSERT(!has_valid_frame() ||
6028 (has_cc() && frame_->height() == original_height) ||
6029 (!has_cc() && frame_->height() == original_height + 1));
6030}
6031
6032
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006033class DeferredCountOperation: public DeferredCode {
6034 public:
6035 DeferredCountOperation(Register value,
6036 bool is_increment,
6037 bool is_postfix,
6038 int target_size)
6039 : value_(value),
6040 is_increment_(is_increment),
6041 is_postfix_(is_postfix),
6042 target_size_(target_size) {}
6043
6044 virtual void Generate() {
6045 VirtualFrame copied_frame(*frame_state()->frame());
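    // Work on a copy of the frame captured at the branch so it can be merged
    // back to the caller's frame state when done.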
6046
6047 Label slow;
6048 // Check for smi operand.
6049 __ tst(value_, Operand(kSmiTagMask));
6050 __ b(ne, &slow);
6051
6052 // Revert optimistic increment/decrement.
6053 if (is_increment_) {
6054 __ sub(value_, value_, Operand(Smi::FromInt(1)));
6055 } else {
6056 __ add(value_, value_, Operand(Smi::FromInt(1)));
6057 }
6058
6059 // Slow case: Convert to number. At this point the
6060 // value to be incremented or decremented is in the value_ register.
6061 __ bind(&slow);
6062
6063 // Convert the operand to a number.
6064 copied_frame.EmitPush(value_);
6065
6066 copied_frame.InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
6067
6068 if (is_postfix_) {
6069 // Postfix: store to result (on the stack).
6070 __ str(r0, MemOperand(sp, target_size_ * kPointerSize));
6071 }
6072
6073 copied_frame.EmitPush(r0);
6074 copied_frame.EmitPush(Operand(Smi::FromInt(1)));
6075
6076 if (is_increment_) {
6077 copied_frame.CallRuntime(Runtime::kNumberAdd, 2);
6078 } else {
6079 copied_frame.CallRuntime(Runtime::kNumberSub, 2);
6080 }
6081
6082 __ Move(value_, r0);
6083
6084 copied_frame.MergeTo(frame_state()->frame());
6085 }
6086
6087 private:
6088 Register value_;
6089 bool is_increment_;
6090 bool is_postfix_;
6091 int target_size_;
6092};
6093
6094
Steve Blocka7e24c12009-10-30 11:49:00 +00006095void CodeGenerator::VisitCountOperation(CountOperation* node) {
6096#ifdef DEBUG
6097 int original_height = frame_->height();
6098#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006099 Comment cmnt(masm_, "[ CountOperation");
Steve Block8defd9f2010-07-08 12:39:36 +01006100 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006101
6102 bool is_postfix = node->is_postfix();
6103 bool is_increment = node->op() == Token::INC;
6104
6105 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
6106 bool is_const = (var != NULL && var->mode() == Variable::CONST);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006107 bool is_slot = (var != NULL && var->mode() == Variable::VAR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006108
Kristian Monsen0d5e1162010-09-30 15:31:59 +01006109 if (!is_const && is_slot && type_info(var->AsSlot()).IsSmi()) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006110 // The type info declares that this variable is always a Smi. That
6111 // means it is a Smi both before and after the increment/decrement.
6112 // Let's make use of that to emit a very minimal count operation.
6113 Reference target(this, node->expression(), !is_const);
6114 ASSERT(!target.is_illegal());
6115 target.GetValue(); // Pushes the value.
6116 Register value = frame_->PopToRegister();
6117 if (is_postfix) frame_->EmitPush(value);
6118 if (is_increment) {
6119 __ add(value, value, Operand(Smi::FromInt(1)));
6120 } else {
6121 __ sub(value, value, Operand(Smi::FromInt(1)));
6122 }
6123 frame_->EmitPush(value);
Steve Block8defd9f2010-07-08 12:39:36 +01006124 target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006125 if (is_postfix) frame_->Pop();
6126 ASSERT_EQ(original_height + 1, frame_->height());
6127 return;
6128 }
6129
6130 // If it's a postfix expression and its result is not ignored and the
6131 // reference is non-trivial, then push a placeholder on the stack now
6132 // to hold the result of the expression.
6133 bool placeholder_pushed = false;
6134 if (!is_slot && is_postfix) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006135 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006136 placeholder_pushed = true;
Steve Blocka7e24c12009-10-30 11:49:00 +00006137 }
6138
Leon Clarked91b9f72010-01-27 17:25:45 +00006139 // A constant reference is not saved to, so a constant reference is not a
6140 // compound assignment reference.
6141 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00006142 if (target.is_illegal()) {
6143 // Spoof the virtual frame to have the expected height (one higher
6144 // than on entry).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006145 if (!placeholder_pushed) frame_->EmitPush(Operand(Smi::FromInt(0)));
Steve Block6ded16b2010-05-10 14:33:55 +01006146 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006147 return;
6148 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006149
Kristian Monsen25f61362010-05-21 11:50:48 +01006150 // This pushes 0, 1 or 2 words on the stack to be used later when updating
6151 // the target. It also pushes the current value of the target.
Steve Block6ded16b2010-05-10 14:33:55 +01006152 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006153
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006154 bool value_is_known_smi = frame_->KnownSmiAt(0);
Kristian Monsen25f61362010-05-21 11:50:48 +01006155 Register value = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00006156
6157 // Postfix: Store the old value as the result.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006158 if (placeholder_pushed) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006159 frame_->SetElementAt(value, target.size());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006160 } else if (is_postfix) {
6161 frame_->EmitPush(value);
6162 __ mov(VirtualFrame::scratch0(), value);
6163 value = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00006164 }
6165
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006166 // We can't use any type information here since the virtual frame from the
6167 // deferred code may have lost information and we can't merge a virtual
6168 // frame with less specific type knowledge to a virtual frame with more
6169 // specific knowledge that has already used that specific knowledge to
6170 // generate code.
6171 frame_->ForgetTypeInfo();
6172
6173 // The constructor here will capture the current virtual frame and use it to
6174 // merge to after the deferred code has run. No virtual frame changes are
6175 // allowed from here until the 'BindExit' below.
6176 DeferredCode* deferred =
6177 new DeferredCountOperation(value,
6178 is_increment,
6179 is_postfix,
6180 target.size());
6181 if (!value_is_known_smi) {
6182 // Check for smi operand.
6183 __ tst(value, Operand(kSmiTagMask));
6184
6185 deferred->Branch(ne);
6186 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006187
Steve Blocka7e24c12009-10-30 11:49:00 +00006188 // Perform optimistic increment/decrement.
6189 if (is_increment) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006190 __ add(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006191 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01006192 __ sub(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006193 }
6194
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006195 // If increment/decrement overflows, go to deferred code.
6196 deferred->Branch(vs);
Steve Blocka7e24c12009-10-30 11:49:00 +00006197
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006198 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00006199
Steve Blocka7e24c12009-10-30 11:49:00 +00006200 // Store the new value in the target if not const.
Kristian Monsen25f61362010-05-21 11:50:48 +01006201 // At this point the answer is in the value register.
Kristian Monsen25f61362010-05-21 11:50:48 +01006202 frame_->EmitPush(value);
6203 // Set the target with the result, leaving the result on
6204 // top of the stack. Removes the target from the stack if
6205 // it has a non-zero size.
Steve Block8defd9f2010-07-08 12:39:36 +01006206 if (!is_const) target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Steve Blocka7e24c12009-10-30 11:49:00 +00006207 }
6208
6209 // Postfix: Discard the new value and use the old.
Kristian Monsen25f61362010-05-21 11:50:48 +01006210 if (is_postfix) frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01006211 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006212}
6213
6214
Steve Block6ded16b2010-05-10 14:33:55 +01006215void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006216 // According to ECMA-262 section 11.11, page 58, the binary logical
6217 // operators must yield the result of one of the two expressions
6218 // before any ToBoolean() conversions. This means that the value
6219 // produced by a && or || operator is not necessarily a boolean.
6220
6221 // NOTE: If the left hand side produces a materialized value (not in
6222 // the CC register), we force the right hand side to do the
6223 // same. This is necessary because we may have to branch to the exit
6224 // after evaluating the left hand side (due to the shortcut
6225 // semantics), but the compiler must (statically) know if the result
6226 // of compiling the binary operation is materialized or not.
Steve Block6ded16b2010-05-10 14:33:55 +01006227 if (node->op() == Token::AND) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006228 JumpTarget is_true;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006229 LoadCondition(node->left(), &is_true, false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006230 if (has_valid_frame() && !has_cc()) {
6231 // The left-hand side result is on top of the virtual frame.
6232 JumpTarget pop_and_continue;
6233 JumpTarget exit;
6234
Leon Clarkef7060e22010-06-03 12:02:55 +01006235 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006236 // Avoid popping the result if it converts to 'false' using the
6237 // standard ToBoolean() conversion as described in ECMA-262,
6238 // section 9.2, page 30.
6239 ToBoolean(&pop_and_continue, &exit);
6240 Branch(false, &exit);
6241
6242 // Pop the result of evaluating the first part.
6243 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006244 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006245
6246 // Evaluate right side expression.
6247 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006248 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006249
6250 // Exit (always with a materialized value).
6251 exit.Bind();
6252 } else if (has_cc() || is_true.is_linked()) {
6253 // The left-hand side is either (a) partially compiled to
6254 // control flow with a final branch left to emit or (b) fully
6255 // compiled to control flow and possibly true.
6256 if (has_cc()) {
6257 Branch(false, false_target());
6258 }
6259 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006260 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006261 } else {
6262 // Nothing to do.
6263 ASSERT(!has_valid_frame() && !has_cc() && !is_true.is_linked());
6264 }
6265
Steve Block6ded16b2010-05-10 14:33:55 +01006266 } else {
6267 ASSERT(node->op() == Token::OR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006268 JumpTarget is_false;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006269 LoadCondition(node->left(), true_target(), &is_false, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006270 if (has_valid_frame() && !has_cc()) {
6271 // The left-hand side result is on top of the virtual frame.
6272 JumpTarget pop_and_continue;
6273 JumpTarget exit;
6274
Leon Clarkef7060e22010-06-03 12:02:55 +01006275 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006276 // Avoid popping the result if it converts to 'true' using the
6277 // standard ToBoolean() conversion as described in ECMA-262,
6278 // section 9.2, page 30.
6279 ToBoolean(&exit, &pop_and_continue);
6280 Branch(true, &exit);
6281
6282 // Pop the result of evaluating the first part.
6283 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006284 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006285
6286 // Evaluate right side expression.
6287 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006288 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006289
6290 // Exit (always with a materialized value).
6291 exit.Bind();
6292 } else if (has_cc() || is_false.is_linked()) {
6293 // The left-hand side is either (a) partially compiled to
6294 // control flow with a final branch left to emit or (b) fully
6295 // compiled to control flow and possibly false.
6296 if (has_cc()) {
6297 Branch(true, true_target());
6298 }
6299 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006300 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006301 } else {
6302 // Nothing to do.
6303 ASSERT(!has_valid_frame() && !has_cc() && !is_false.is_linked());
6304 }
Steve Block6ded16b2010-05-10 14:33:55 +01006305 }
6306}
Steve Blocka7e24c12009-10-30 11:49:00 +00006307
Steve Block6ded16b2010-05-10 14:33:55 +01006308
6309void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
6310#ifdef DEBUG
6311 int original_height = frame_->height();
6312#endif
6313 Comment cmnt(masm_, "[ BinaryOperation");
6314
6315 if (node->op() == Token::AND || node->op() == Token::OR) {
Steve Block6ded16b2010-05-10 14:33:55 +01006316 GenerateLogicalBooleanOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00006317 } else {
6318 // Optimize for the case where (at least) one of the expressions
6319 // is a literal small integer.
6320 Literal* lliteral = node->left()->AsLiteral();
6321 Literal* rliteral = node->right()->AsLiteral();
6322 // NOTE: The code below assumes that the slow cases (calls to runtime)
6323 // never return a constant/immutable object.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006324 bool overwrite_left = node->left()->ResultOverwriteAllowed();
6325 bool overwrite_right = node->right()->ResultOverwriteAllowed();
Steve Blocka7e24c12009-10-30 11:49:00 +00006326
6327 if (rliteral != NULL && rliteral->handle()->IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01006328 VirtualFrame::RegisterAllocationScope scope(this);
6329 Load(node->left());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006330 if (frame_->KnownSmiAt(0)) overwrite_left = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00006331 SmiOperation(node->op(),
6332 rliteral->handle(),
6333 false,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006334 overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006335 } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
Steve Block6ded16b2010-05-10 14:33:55 +01006336 VirtualFrame::RegisterAllocationScope scope(this);
6337 Load(node->right());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006338 if (frame_->KnownSmiAt(0)) overwrite_right = false;
Steve Blocka7e24c12009-10-30 11:49:00 +00006339 SmiOperation(node->op(),
6340 lliteral->handle(),
6341 true,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006342 overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006343 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006344 GenerateInlineSmi inline_smi =
6345 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
6346 if (lliteral != NULL) {
6347 ASSERT(!lliteral->handle()->IsSmi());
6348 inline_smi = DONT_GENERATE_INLINE_SMI;
6349 }
6350 if (rliteral != NULL) {
6351 ASSERT(!rliteral->handle()->IsSmi());
6352 inline_smi = DONT_GENERATE_INLINE_SMI;
6353 }
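      // (A literal that reaches this point is known not to be a Smi, since
      // Smi literals were handled above, so the inline Smi code could never
      // take its fast path and is not worth emitting.)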
Steve Block6ded16b2010-05-10 14:33:55 +01006354 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006355 OverwriteMode overwrite_mode = NO_OVERWRITE;
6356 if (overwrite_left) {
6357 overwrite_mode = OVERWRITE_LEFT;
6358 } else if (overwrite_right) {
6359 overwrite_mode = OVERWRITE_RIGHT;
6360 }
Steve Block6ded16b2010-05-10 14:33:55 +01006361 Load(node->left());
6362 Load(node->right());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006363 GenericBinaryOperation(node->op(), overwrite_mode, inline_smi);
Steve Blocka7e24c12009-10-30 11:49:00 +00006364 }
Steve Blocka7e24c12009-10-30 11:49:00 +00006365 }
6366 ASSERT(!has_valid_frame() ||
6367 (has_cc() && frame_->height() == original_height) ||
6368 (!has_cc() && frame_->height() == original_height + 1));
6369}
6370
6371
6372void CodeGenerator::VisitThisFunction(ThisFunction* node) {
6373#ifdef DEBUG
6374 int original_height = frame_->height();
6375#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01006376 frame_->EmitPush(MemOperand(frame_->Function()));
Steve Block6ded16b2010-05-10 14:33:55 +01006377 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006378}
6379
6380
6381void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
6382#ifdef DEBUG
6383 int original_height = frame_->height();
6384#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006385 Comment cmnt(masm_, "[ CompareOperation");
6386
Steve Block6ded16b2010-05-10 14:33:55 +01006387 VirtualFrame::RegisterAllocationScope nonspilled_scope(this);
6388
Steve Blocka7e24c12009-10-30 11:49:00 +00006389 // Get the expressions from the node.
6390 Expression* left = node->left();
6391 Expression* right = node->right();
6392 Token::Value op = node->op();
6393
Steve Blocka7e24c12009-10-30 11:49:00 +00006394 // To make typeof testing for natives implemented in JavaScript really
6395 // efficient, we generate special code for expressions of the form:
6396 // 'typeof <expression> == <string>'.
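  // For example (illustrative):
  //   if (typeof callback == 'function') { ... }
  // compiles to a direct type check; the typeof result string is never
  // materialized.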
6397 UnaryOperation* operation = left->AsUnaryOperation();
6398 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
6399 (operation != NULL && operation->op() == Token::TYPEOF) &&
6400 (right->AsLiteral() != NULL &&
6401 right->AsLiteral()->handle()->IsString())) {
6402 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
6403
Steve Block6ded16b2010-05-10 14:33:55 +01006404 // Load the operand and move it to a register.
Steve Blocka7e24c12009-10-30 11:49:00 +00006405 LoadTypeofExpression(operation->expression());
Steve Block6ded16b2010-05-10 14:33:55 +01006406 Register tos = frame_->PopToRegister();
6407
Steve Block6ded16b2010-05-10 14:33:55 +01006408 Register scratch = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00006409
6410 if (check->Equals(Heap::number_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006411 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006412 true_target()->Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01006413 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00006414 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006415 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006416 cc_reg_ = eq;
6417
6418 } else if (check->Equals(Heap::string_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006419 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006420 false_target()->Branch(eq);
6421
Steve Block6ded16b2010-05-10 14:33:55 +01006422 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00006423
6424 // It can be an undetectable string object.
Steve Block6ded16b2010-05-10 14:33:55 +01006425 __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
6426 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
6427 __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00006428 false_target()->Branch(eq);
6429
Steve Block6ded16b2010-05-10 14:33:55 +01006430 __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
6431 __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00006432 cc_reg_ = lt;
6433
6434 } else if (check->Equals(Heap::boolean_symbol())) {
6435 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006436 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006437 true_target()->Branch(eq);
6438 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006439 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006440 cc_reg_ = eq;
6441
6442 } else if (check->Equals(Heap::undefined_symbol())) {
6443 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006444 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006445 true_target()->Branch(eq);
6446
Steve Block6ded16b2010-05-10 14:33:55 +01006447 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006448 false_target()->Branch(eq);
6449
6450 // It can be an undetectable object.
Steve Block6ded16b2010-05-10 14:33:55 +01006451 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
6452 __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
6453 __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
6454 __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00006455
6456 cc_reg_ = eq;
6457
6458 } else if (check->Equals(Heap::function_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006459 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006460 false_target()->Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01006461 Register map_reg = scratch;
6462 __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00006463 true_target()->Branch(eq);
6464 // Regular expressions are callable so typeof == 'function'.
Steve Block6ded16b2010-05-10 14:33:55 +01006465 __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00006466 cc_reg_ = eq;
6467
6468 } else if (check->Equals(Heap::object_symbol())) {
Steve Block6ded16b2010-05-10 14:33:55 +01006469 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00006470 false_target()->Branch(eq);
6471
Steve Blocka7e24c12009-10-30 11:49:00 +00006472 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01006473 __ cmp(tos, ip);
Steve Blocka7e24c12009-10-30 11:49:00 +00006474 true_target()->Branch(eq);
6475
Steve Block6ded16b2010-05-10 14:33:55 +01006476 Register map_reg = scratch;
6477 __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00006478 false_target()->Branch(eq);
6479
Steve Blocka7e24c12009-10-30 11:49:00 +00006480 // It can be an undetectable object.
Steve Block6ded16b2010-05-10 14:33:55 +01006481 __ ldrb(tos, FieldMemOperand(map_reg, Map::kBitFieldOffset));
6482 __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
6483 __ cmp(tos, Operand(1 << Map::kIsUndetectable));
Steve Blocka7e24c12009-10-30 11:49:00 +00006484 false_target()->Branch(eq);
6485
Steve Block6ded16b2010-05-10 14:33:55 +01006486 __ ldrb(tos, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
6487 __ cmp(tos, Operand(FIRST_JS_OBJECT_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00006488 false_target()->Branch(lt);
Steve Block6ded16b2010-05-10 14:33:55 +01006489 __ cmp(tos, Operand(LAST_JS_OBJECT_TYPE));
Steve Blocka7e24c12009-10-30 11:49:00 +00006490 cc_reg_ = le;
6491
6492 } else {
6493 // Uncommon case: typeof testing against a string literal that is
6494 // never returned from the typeof operator.
6495 false_target()->Jump();
6496 }
6497 ASSERT(!has_valid_frame() ||
6498 (has_cc() && frame_->height() == original_height));
6499 return;
6500 }
6501
6502 switch (op) {
6503 case Token::EQ:
6504 Comparison(eq, left, right, false);
6505 break;
6506
6507 case Token::LT:
6508 Comparison(lt, left, right);
6509 break;
6510
6511 case Token::GT:
6512 Comparison(gt, left, right);
6513 break;
6514
6515 case Token::LTE:
6516 Comparison(le, left, right);
6517 break;
6518
6519 case Token::GTE:
6520 Comparison(ge, left, right);
6521 break;
6522
6523 case Token::EQ_STRICT:
6524 Comparison(eq, left, right, true);
6525 break;
6526
6527 case Token::IN: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006528 Load(left);
6529 Load(right);
Steve Blockd0582a62009-12-15 09:54:21 +00006530 frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00006531 frame_->EmitPush(r0);
6532 break;
6533 }
6534
6535 case Token::INSTANCEOF: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006536 Load(left);
6537 Load(right);
Steve Blocka7e24c12009-10-30 11:49:00 +00006538 InstanceofStub stub;
6539 frame_->CallStub(&stub, 2);
6540 // At this point if instanceof succeeded then r0 == 0.
6541 __ tst(r0, Operand(r0));
6542 cc_reg_ = eq;
6543 break;
6544 }
6545
6546 default:
6547 UNREACHABLE();
6548 }
6549 ASSERT((has_cc() && frame_->height() == original_height) ||
6550 (!has_cc() && frame_->height() == original_height + 1));
6551}
6552
6553
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006554void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
6555#ifdef DEBUG
6556 int original_height = frame_->height();
6557#endif
6558 Comment cmnt(masm_, "[ CompareToNull");
6559
6560 Load(node->expression());
6561 Register tos = frame_->PopToRegister();
6562 __ LoadRoot(ip, Heap::kNullValueRootIndex);
6563 __ cmp(tos, ip);
6564
6565 // The 'null' value is only equal to 'undefined' if using non-strict
6566 // comparisons.
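  // For example, 'x == null' also holds for undefined and for undetectable
  // objects, while 'x === null' holds for null alone.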
6567 if (!node->is_strict()) {
6568 true_target()->Branch(eq);
6569 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
6570 __ cmp(tos, Operand(ip));
6571 true_target()->Branch(eq);
6572
6573 __ tst(tos, Operand(kSmiTagMask));
6574 false_target()->Branch(eq);
6575
6576 // It can be an undetectable object.
6577 __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
6578 __ ldrb(tos, FieldMemOperand(tos, Map::kBitFieldOffset));
6579 __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
6580 __ cmp(tos, Operand(1 << Map::kIsUndetectable));
6581 }
6582
6583 cc_reg_ = eq;
6584 ASSERT(has_cc() && frame_->height() == original_height);
6585}
6586
6587
Steve Block6ded16b2010-05-10 14:33:55 +01006588class DeferredReferenceGetNamedValue: public DeferredCode {
6589 public:
Leon Clarkef7060e22010-06-03 12:02:55 +01006590 explicit DeferredReferenceGetNamedValue(Register receiver,
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006591 Handle<String> name,
6592 bool is_contextual)
6593 : receiver_(receiver),
6594 name_(name),
6595 is_contextual_(is_contextual),
6596 is_dont_delete_(false) {
6597 set_comment(is_contextual
6598 ? "[ DeferredReferenceGetNamedValue (contextual)"
6599 : "[ DeferredReferenceGetNamedValue");
Steve Block6ded16b2010-05-10 14:33:55 +01006600 }
6601
6602 virtual void Generate();
6603
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006604 void set_is_dont_delete(bool value) {
6605 ASSERT(is_contextual_);
6606 is_dont_delete_ = value;
6607 }
6608
Steve Block6ded16b2010-05-10 14:33:55 +01006609 private:
Leon Clarkef7060e22010-06-03 12:02:55 +01006610 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01006611 Handle<String> name_;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006612 bool is_contextual_;
6613 bool is_dont_delete_;
Steve Block6ded16b2010-05-10 14:33:55 +01006614};
6615
6616
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006617// Convention for this is that on entry the receiver is in a register that
6618// is not used by the stack. On exit the answer is found in that same
6619// register and the stack has the same height.
Steve Block6ded16b2010-05-10 14:33:55 +01006620void DeferredReferenceGetNamedValue::Generate() {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006621#ifdef DEBUG
6622 int expected_height = frame_state()->frame()->height();
6623#endif
6624 VirtualFrame copied_frame(*frame_state()->frame());
6625 copied_frame.SpillAll();
Leon Clarkef7060e22010-06-03 12:02:55 +01006626
Steve Block6ded16b2010-05-10 14:33:55 +01006627 Register scratch1 = VirtualFrame::scratch0();
6628 Register scratch2 = VirtualFrame::scratch1();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006629 ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
Steve Block6ded16b2010-05-10 14:33:55 +01006630 __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
6631 __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);
6632
Leon Clarkef7060e22010-06-03 12:02:55 +01006633 // Ensure receiver in r0 and name in r2 to match load ic calling convention.
6634 __ Move(r0, receiver_);
Steve Block6ded16b2010-05-10 14:33:55 +01006635 __ mov(r2, Operand(name_));
6636
6637 // The rest of the instructions in the deferred code must be together.
6638 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6639 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006640 RelocInfo::Mode mode = is_contextual_
6641 ? RelocInfo::CODE_TARGET_CONTEXT
6642 : RelocInfo::CODE_TARGET;
6643 __ Call(ic, mode);
6644 // We must mark the code just after the call with the correct marker.
6645 MacroAssembler::NopMarkerTypes code_marker;
6646 if (is_contextual_) {
6647 code_marker = is_dont_delete_
6648 ? MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE
6649 : MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT;
6650 } else {
6651 code_marker = MacroAssembler::PROPERTY_ACCESS_INLINED;
6652 }
6653 __ MarkCode(code_marker);
Steve Block6ded16b2010-05-10 14:33:55 +01006654
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006655 // At this point the answer is in r0. We move it to the expected register
6656 // if necessary.
6657 __ Move(receiver_, r0);
6658
6659 // Now go back to the frame that we entered with. This will not overwrite
6660 // the receiver register since that register was not in use when we came
6661 // in. The instructions emitted by this merge are skipped over by the
6662 // inline load patching mechanism when looking for the branch instruction
6663 // that tells it where the code to patch is.
6664 copied_frame.MergeTo(frame_state()->frame());
6665
Steve Block6ded16b2010-05-10 14:33:55 +01006666 // Block the constant pool for one more instruction after leaving this
6667 // constant pool block scope to include the branch instruction ending the
6668 // deferred code.
6669 __ BlockConstPoolFor(1);
6670 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006671 ASSERT_EQ(expected_height, frame_state()->frame()->height());
Steve Block6ded16b2010-05-10 14:33:55 +01006672}
6673
6674
6675class DeferredReferenceGetKeyedValue: public DeferredCode {
6676 public:
Kristian Monsen25f61362010-05-21 11:50:48 +01006677 DeferredReferenceGetKeyedValue(Register key, Register receiver)
6678 : key_(key), receiver_(receiver) {
Steve Block6ded16b2010-05-10 14:33:55 +01006679 set_comment("[ DeferredReferenceGetKeyedValue");
6680 }
6681
6682 virtual void Generate();
Kristian Monsen25f61362010-05-21 11:50:48 +01006683
6684 private:
6685 Register key_;
6686 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01006687};
6688
6689
Steve Block8defd9f2010-07-08 12:39:36 +01006690// Takes the key and the receiver in r0 and r1, or vice versa. Returns the
6691// result in r0.
Steve Block6ded16b2010-05-10 14:33:55 +01006692void DeferredReferenceGetKeyedValue::Generate() {
Kristian Monsen25f61362010-05-21 11:50:48 +01006693 ASSERT((key_.is(r0) && receiver_.is(r1)) ||
6694 (key_.is(r1) && receiver_.is(r0)));
6695
Steve Block8defd9f2010-07-08 12:39:36 +01006696 VirtualFrame copied_frame(*frame_state()->frame());
6697 copied_frame.SpillAll();
6698
Steve Block6ded16b2010-05-10 14:33:55 +01006699 Register scratch1 = VirtualFrame::scratch0();
6700 Register scratch2 = VirtualFrame::scratch1();
6701 __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
6702 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);
6703
Kristian Monsen25f61362010-05-21 11:50:48 +01006704 // Ensure key in r0 and receiver in r1 to match keyed load ic calling
6705 // convention.
6706 if (key_.is(r1)) {
6707 __ Swap(r0, r1, ip);
6708 }
6709
Steve Block6ded16b2010-05-10 14:33:55 +01006710 // The rest of the instructions in the deferred code must be together.
6711 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Kristian Monsen25f61362010-05-21 11:50:48 +01006712 // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
Steve Block6ded16b2010-05-10 14:33:55 +01006713 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
6714 __ Call(ic, RelocInfo::CODE_TARGET);
6715 // The call must be followed by a nop instruction to indicate that the
6716 // keyed load has been inlined.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006717 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
Steve Block6ded16b2010-05-10 14:33:55 +01006718
Steve Block8defd9f2010-07-08 12:39:36 +01006719 // Now go back to the frame that we entered with. This will not overwrite
6720 // the receiver or key registers since they were not in use when we came
6721 // in. The instructions emitted by this merge are skipped over by the
6722 // inline load patching mechanism when looking for the branch instruction
6723 // that tells it where the code to patch is.
6724 copied_frame.MergeTo(frame_state()->frame());
6725
Steve Block6ded16b2010-05-10 14:33:55 +01006726 // Block the constant pool for one more instruction after leaving this
6727 // constant pool block scope to include the branch instruction ending the
6728 // deferred code.
6729 __ BlockConstPoolFor(1);
6730 }
6731}
6732
6733
6734class DeferredReferenceSetKeyedValue: public DeferredCode {
6735 public:
Leon Clarkef7060e22010-06-03 12:02:55 +01006736 DeferredReferenceSetKeyedValue(Register value,
6737 Register key,
6738 Register receiver)
6739 : value_(value), key_(key), receiver_(receiver) {
Steve Block6ded16b2010-05-10 14:33:55 +01006740 set_comment("[ DeferredReferenceSetKeyedValue");
6741 }
6742
6743 virtual void Generate();
Leon Clarkef7060e22010-06-03 12:02:55 +01006744
6745 private:
6746 Register value_;
6747 Register key_;
6748 Register receiver_;
Steve Block6ded16b2010-05-10 14:33:55 +01006749};
6750
6751
6752void DeferredReferenceSetKeyedValue::Generate() {
6753 Register scratch1 = VirtualFrame::scratch0();
6754 Register scratch2 = VirtualFrame::scratch1();
6755 __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
6756 __ IncrementCounter(
6757 &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);
6758
Leon Clarkef7060e22010-06-03 12:02:55 +01006759 // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
6760 // calling convention.
6761 if (value_.is(r1)) {
6762 __ Swap(r0, r1, ip);
6763 }
6764 ASSERT(receiver_.is(r2));
6765
Steve Block6ded16b2010-05-10 14:33:55 +01006766 // The rest of the instructions in the deferred code must be together.
6767 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Leon Clarkef7060e22010-06-03 12:02:55 +01006768 // Call keyed store IC. It has the arguments value, key and receiver in r0,
6769 // r1 and r2.
Steve Block6ded16b2010-05-10 14:33:55 +01006770 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
6771 __ Call(ic, RelocInfo::CODE_TARGET);
6772 // The call must be followed by a nop instruction to indicate that the
6773 // keyed store has been inlined.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006774 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
Steve Block6ded16b2010-05-10 14:33:55 +01006775
6776 // Block the constant pool for one more instruction after leaving this
6777 // constant pool block scope to include the branch instruction ending the
6778 // deferred code.
6779 __ BlockConstPoolFor(1);
6780 }
6781}
6782
6783
Kristian Monsen50ef84f2010-07-29 15:18:00 +01006784class DeferredReferenceSetNamedValue: public DeferredCode {
6785 public:
6786 DeferredReferenceSetNamedValue(Register value,
6787 Register receiver,
6788 Handle<String> name)
6789 : value_(value), receiver_(receiver), name_(name) {
6790 set_comment("[ DeferredReferenceSetNamedValue");
6791 }
6792
6793 virtual void Generate();
6794
6795 private:
6796 Register value_;
6797 Register receiver_;
6798 Handle<String> name_;
6799};
6800
6801
6802// Takes value in r0, receiver in r1 and returns the result (the
6803// value) in r0.
6804void DeferredReferenceSetNamedValue::Generate() {
6805 // Record the entry frame and spill.
6806 VirtualFrame copied_frame(*frame_state()->frame());
6807 copied_frame.SpillAll();
6808
6809 // Ensure value in r0, receiver in r1 to match store ic calling
6810 // convention.
6811 ASSERT(value_.is(r0) && receiver_.is(r1));
6812 __ mov(r2, Operand(name_));
6813
6814 // The rest of the instructions in the deferred code must be together.
6815 { Assembler::BlockConstPoolScope block_const_pool(masm_);
6816 // Call the named store IC. It has the arguments value, receiver and name
6817 // in r0, r1 and r2.
6818 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
6819 __ Call(ic, RelocInfo::CODE_TARGET);
6820 // The call must be followed by a nop instruction to indicate that the
6821 // named store has been inlined.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006822 __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01006823
6824 // Go back to the frame we entered with. The instructions
6825 // generated by this merge are skipped over by the inline store
6826 // patching mechanism when looking for the branch instruction that
6827 // tells it where the code to patch is.
6828 copied_frame.MergeTo(frame_state()->frame());
6829
6830 // Block the constant pool for one more instruction after leaving this
6831 // constant pool block scope to include the branch instruction ending the
6832 // deferred code.
6833 __ BlockConstPoolFor(1);
6834 }
6835}
6836
6837
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006838// Consumes the top of stack (the receiver) and pushes the result instead.
Steve Block6ded16b2010-05-10 14:33:55 +01006839void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006840 bool contextual_load_in_builtin =
6841 is_contextual &&
6842 (Bootstrapper::IsActive() ||
6843 (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
6844
6845 if (scope()->is_global_scope() ||
6846 loop_nesting() == 0 ||
6847 contextual_load_in_builtin) {
Steve Block6ded16b2010-05-10 14:33:55 +01006848 Comment cmnt(masm(), "[ Load from named Property");
6849 // Set up the name register and call the load IC.
6850 frame_->CallLoadIC(name,
6851 is_contextual
6852 ? RelocInfo::CODE_TARGET_CONTEXT
6853 : RelocInfo::CODE_TARGET);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006854 frame_->EmitPush(r0); // Push answer.
Steve Block6ded16b2010-05-10 14:33:55 +01006855 } else {
6856 // Inline the in-object property case.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006857 Comment cmnt(masm(), is_contextual
6858 ? "[ Inlined contextual property load"
6859 : "[ Inlined named property load");
Steve Block6ded16b2010-05-10 14:33:55 +01006860
6861 // Counter will be decremented in the deferred code. Placed here to avoid
6862 // having it in the instruction stream below where patching will occur.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006863 if (is_contextual) {
6864 __ IncrementCounter(&Counters::named_load_global_inline, 1,
6865 frame_->scratch0(), frame_->scratch1());
6866 } else {
6867 __ IncrementCounter(&Counters::named_load_inline, 1,
6868 frame_->scratch0(), frame_->scratch1());
6869 }
Steve Block6ded16b2010-05-10 14:33:55 +01006870
6871 // The following instructions are the inlined load of an in-object property.
6872 // Parts of this code are patched, so the exact instructions generated
6873 // need to be fixed. Therefore the constant pool is blocked when generating
6874 // this code.
6875
6876 // Load the receiver from the stack.
Leon Clarkef7060e22010-06-03 12:02:55 +01006877 Register receiver = frame_->PopToRegister();
Steve Block6ded16b2010-05-10 14:33:55 +01006878
6879 DeferredReferenceGetNamedValue* deferred =
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006880 new DeferredReferenceGetNamedValue(receiver, name, is_contextual);
Steve Block6ded16b2010-05-10 14:33:55 +01006881
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006882 bool is_dont_delete = false;
6883 if (is_contextual) {
6884 if (!info_->closure().is_null()) {
6885 // When doing lazy compilation we can check if the global cell
6886 // already exists and use its "don't delete" status as a hint.
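        // A DontDelete global property cannot be removed, so its cell never
        // holds the hole value and the inlined load below can skip the hole
        // check.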
6887 AssertNoAllocation no_gc;
6888 v8::internal::GlobalObject* global_object =
6889 info_->closure()->context()->global();
6890 LookupResult lookup;
6891 global_object->LocalLookupRealNamedProperty(*name, &lookup);
6892 if (lookup.IsProperty() && lookup.type() == NORMAL) {
6893 ASSERT(lookup.holder() == global_object);
6894 ASSERT(global_object->property_dictionary()->ValueAt(
6895 lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
6896 is_dont_delete = lookup.IsDontDelete();
6897 }
6898 }
6899 if (is_dont_delete) {
6900 __ IncrementCounter(&Counters::dont_delete_hint_hit, 1,
6901 frame_->scratch0(), frame_->scratch1());
6902 }
6903 }
Steve Block6ded16b2010-05-10 14:33:55 +01006904
6905 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006906 if (!is_contextual) {
6907 // Check that the receiver is a heap object.
6908 __ tst(receiver, Operand(kSmiTagMask));
6909 deferred->Branch(eq);
6910 }
6911
6912 // Check for the_hole_value if necessary.
6913 // Below we rely on the number of instructions generated, and we can't
6914 // cope with the Check macro which does not generate a fixed number of
6915 // instructions.
6916 Label skip, check_the_hole, cont;
6917 if (FLAG_debug_code && is_contextual && is_dont_delete) {
6918 __ b(&skip);
6919 __ bind(&check_the_hole);
6920 __ Check(ne, "DontDelete cells can't contain the hole");
6921 __ b(&cont);
6922 __ bind(&skip);
6923 }
6924
6925#ifdef DEBUG
6926 int InlinedNamedLoadInstructions = 5;
6927 Label check_inlined_codesize;
6928 masm_->bind(&check_inlined_codesize);
6929#endif
Steve Block6ded16b2010-05-10 14:33:55 +01006930
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006931 Register scratch = VirtualFrame::scratch0();
6932 Register scratch2 = VirtualFrame::scratch1();
6933
Steve Block6ded16b2010-05-10 14:33:55 +01006934 // Check the map. The null map used below is patched by the inline cache
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006935 // code. Therefore we can't use a LoadRoot call.
6936 __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
6937 __ mov(scratch2, Operand(Factory::null_value()));
6938 __ cmp(scratch, scratch2);
Steve Block6ded16b2010-05-10 14:33:55 +01006939 deferred->Branch(ne);
6940
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006941 if (is_contextual) {
6942#ifdef DEBUG
6943 InlinedNamedLoadInstructions += 1;
6944#endif
6945 // Load the (initially invalid) cell and get its value.
6946 masm()->mov(receiver, Operand(Factory::null_value()));
6947 __ ldr(receiver,
6948 FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset));
6949
6950 deferred->set_is_dont_delete(is_dont_delete);
6951
6952 if (!is_dont_delete) {
6953#ifdef DEBUG
6954 InlinedNamedLoadInstructions += 3;
6955#endif
6956 __ cmp(receiver, Operand(Factory::the_hole_value()));
6957 deferred->Branch(eq);
6958 } else if (FLAG_debug_code) {
6959#ifdef DEBUG
6960 InlinedNamedLoadInstructions += 3;
6961#endif
6962 __ cmp(receiver, Operand(Factory::the_hole_value()));
6963 __ b(&check_the_hole, eq);
6964 __ bind(&cont);
6965 }
6966 } else {
6967 // Initially use an invalid index. The index will be patched by the
6968 // inline cache code.
6969 __ ldr(receiver, MemOperand(receiver, 0));
6970 }
Steve Block6ded16b2010-05-10 14:33:55 +01006971
6972 // Make sure that the expected number of instructions are generated.
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006973 // If the code before is updated, the offsets in ic-arm.cc
6974 // LoadIC::PatchInlinedContextualLoad and PatchInlinedLoad need
6975 // to be updated.
6976 ASSERT_EQ(InlinedNamedLoadInstructions,
Steve Block6ded16b2010-05-10 14:33:55 +01006977 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
6978 }
6979
6980 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006981 // At this point the receiver register has the result, either from the
6982 // deferred code or from the inlined code.
6983 frame_->EmitPush(receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01006984 }
6985}
6986
6987
6988void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
6989#ifdef DEBUG
Kristian Monsen50ef84f2010-07-29 15:18:00 +01006990 int expected_height = frame()->height() - (is_contextual ? 1 : 2);
Steve Block6ded16b2010-05-10 14:33:55 +01006991#endif
Steve Block6ded16b2010-05-10 14:33:55 +01006992
Kristian Monsen50ef84f2010-07-29 15:18:00 +01006993 Result result;
6994 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
6995 frame()->CallStoreIC(name, is_contextual);
6996 } else {
6997 // Inline the in-object property case.
6998 JumpTarget slow, done;
6999
7000 // Get the value and receiver from the stack.
7001 frame()->PopToR0();
7002 Register value = r0;
7003 frame()->PopToR1();
7004 Register receiver = r1;
7005
7006 DeferredReferenceSetNamedValue* deferred =
7007 new DeferredReferenceSetNamedValue(value, receiver, name);
7008
7009 // Check that the receiver is a heap object.
7010 __ tst(receiver, Operand(kSmiTagMask));
7011 deferred->Branch(eq);
7012
7013 // The following instructions are part of the inlined
7014 // in-object property store code which can be patched. Therefore
7015 // the exact number of instructions generated must be fixed, so
7016 // the constant pool is blocked while generating this code.
7017 { Assembler::BlockConstPoolScope block_const_pool(masm_);
7018 Register scratch0 = VirtualFrame::scratch0();
7019 Register scratch1 = VirtualFrame::scratch1();
7020
7021 // Check the map. Initially use an invalid map to force a
7022 // failure. The map check will be patched in the runtime system.
7023 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
7024
7025#ifdef DEBUG
7026 Label check_inlined_codesize;
7027 masm_->bind(&check_inlined_codesize);
7028#endif
7029 __ mov(scratch0, Operand(Factory::null_value()));
7030 __ cmp(scratch0, scratch1);
7031 deferred->Branch(ne);
7032
7033 int offset = 0;
7034 __ str(value, MemOperand(receiver, offset));
7035
7036 // Update the write barrier and record its size. We do not use
7037 // the RecordWrite macro here because we want the offset
7038 // addition instruction first to make it easy to patch.
7039 Label record_write_start, record_write_done;
7040 __ bind(&record_write_start);
7041 // Add offset into the object.
7042 __ add(scratch0, receiver, Operand(offset));
7043 // Test that the object is not in the new space. We cannot set
7044 // region marks for new space pages.
7045 __ InNewSpace(receiver, scratch1, eq, &record_write_done);
7046 // Record the actual write.
7047 __ RecordWriteHelper(receiver, scratch0, scratch1);
7048 __ bind(&record_write_done);
7049 // Clobber all input registers when running with the debug-code flag
7050 // turned on to provoke errors.
7051 if (FLAG_debug_code) {
7052 __ mov(receiver, Operand(BitCast<int32_t>(kZapValue)));
7053 __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
7054 __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
7055 }
7056 // Check that this is the first inlined write barrier or that
7057 // this inlined write barrier has the same size as all the other
7058 // inlined write barriers.
7059 ASSERT((inlined_write_barrier_size_ == -1) ||
7060 (inlined_write_barrier_size_ ==
7061 masm()->InstructionsGeneratedSince(&record_write_start)));
7062 inlined_write_barrier_size_ =
7063 masm()->InstructionsGeneratedSince(&record_write_start);
7064
7065 // Make sure that the expected number of instructions are generated.
7066 ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
7067 masm()->InstructionsGeneratedSince(&check_inlined_codesize));
7068 }
7069 deferred->BindExit();
7070 }
7071 ASSERT_EQ(expected_height, frame()->height());
Steve Block6ded16b2010-05-10 14:33:55 +01007072}
7073
7074
7075void CodeGenerator::EmitKeyedLoad() {
7076 if (loop_nesting() == 0) {
7077 Comment cmnt(masm_, "[ Load from keyed property");
7078 frame_->CallKeyedLoadIC();
7079 } else {
7080 // Inline the keyed load.
7081 Comment cmnt(masm_, "[ Inlined load from keyed property");
7082
7083 // Counter will be decremented in the deferred code. Placed here to avoid
7084 // having it in the instruction stream below where patching will occur.
7085 __ IncrementCounter(&Counters::keyed_load_inline, 1,
7086 frame_->scratch0(), frame_->scratch1());
7087
Kristian Monsen25f61362010-05-21 11:50:48 +01007088 // Load the key and receiver from the stack.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007089 bool key_is_known_smi = frame_->KnownSmiAt(0);
Kristian Monsen25f61362010-05-21 11:50:48 +01007090 Register key = frame_->PopToRegister();
7091 Register receiver = frame_->PopToRegister(key);
Steve Block6ded16b2010-05-10 14:33:55 +01007092
Kristian Monsen25f61362010-05-21 11:50:48 +01007093 // The deferred code expects key and receiver in registers.
Steve Block6ded16b2010-05-10 14:33:55 +01007094 DeferredReferenceGetKeyedValue* deferred =
Kristian Monsen25f61362010-05-21 11:50:48 +01007095 new DeferredReferenceGetKeyedValue(key, receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01007096
7097 // Check that the receiver is a heap object.
7098 __ tst(receiver, Operand(kSmiTagMask));
7099 deferred->Branch(eq);
7100
7101 // The following instructions are part of the inlined load keyed
7102 // property code which can be patched. Therefore the exact number of
7103 // instructions generated needs to be fixed, so the constant pool is blocked
7104 // while generating this code.
Steve Block6ded16b2010-05-10 14:33:55 +01007105 { Assembler::BlockConstPoolScope block_const_pool(masm_);
7106 Register scratch1 = VirtualFrame::scratch0();
7107 Register scratch2 = VirtualFrame::scratch1();
7108 // Check the map. The null map used below is patched by the inline cache
7109 // code.
7110 __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007111
7112 // Check that the key is a smi.
7113 if (!key_is_known_smi) {
7114 __ tst(key, Operand(kSmiTagMask));
7115 deferred->Branch(ne);
7116 }
7117
Kristian Monsen25f61362010-05-21 11:50:48 +01007118#ifdef DEBUG
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007119 Label check_inlined_codesize;
7120 masm_->bind(&check_inlined_codesize);
Kristian Monsen25f61362010-05-21 11:50:48 +01007121#endif
Steve Block6ded16b2010-05-10 14:33:55 +01007122 __ mov(scratch2, Operand(Factory::null_value()));
7123 __ cmp(scratch1, scratch2);
7124 deferred->Branch(ne);
7125
Iain Merrick75681382010-08-19 15:07:18 +01007126 // Get the elements array from the receiver.
Steve Block6ded16b2010-05-10 14:33:55 +01007127 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
Iain Merrick75681382010-08-19 15:07:18 +01007128 __ AssertFastElements(scratch1);
Steve Block6ded16b2010-05-10 14:33:55 +01007129
7130 // Check that key is within bounds. Use unsigned comparison to handle
7131 // negative keys.
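      // (A negative Smi has its sign bit set, so as an unsigned value it is
      // larger than any valid array length.)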
7132 __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007133 __ cmp(scratch2, key);
Steve Block6ded16b2010-05-10 14:33:55 +01007134 deferred->Branch(ls); // Unsigned less equal.
7135
7136 // Load and check that the result is not the hole (key is a smi).
7137 __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
7138 __ add(scratch1,
7139 scratch1,
7140 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
Kristian Monsen25f61362010-05-21 11:50:48 +01007141 __ ldr(scratch1,
Steve Block6ded16b2010-05-10 14:33:55 +01007142 MemOperand(scratch1, key, LSL,
7143 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
Kristian Monsen25f61362010-05-21 11:50:48 +01007144 __ cmp(scratch1, scratch2);
Steve Block6ded16b2010-05-10 14:33:55 +01007145 deferred->Branch(eq);
7146
Kristian Monsen25f61362010-05-21 11:50:48 +01007147 __ mov(r0, scratch1);
Steve Block6ded16b2010-05-10 14:33:55 +01007148 // Make sure that the expected number of instructions are generated.
Steve Block8defd9f2010-07-08 12:39:36 +01007149 ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
Steve Block6ded16b2010-05-10 14:33:55 +01007150 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
7151 }
7152
7153 deferred->BindExit();
7154 }
7155}
7156
7157
Steve Block8defd9f2010-07-08 12:39:36 +01007158void CodeGenerator::EmitKeyedStore(StaticType* key_type,
7159 WriteBarrierCharacter wb_info) {
Steve Block6ded16b2010-05-10 14:33:55 +01007160 // Generate inlined version of the keyed store if the code is in a loop
7161 // and the key is likely to be a smi.
7162 if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
7163 // Inline the keyed store.
7164 Comment cmnt(masm_, "[ Inlined store to keyed property");
7165
Leon Clarkef7060e22010-06-03 12:02:55 +01007166 Register scratch1 = VirtualFrame::scratch0();
7167 Register scratch2 = VirtualFrame::scratch1();
7168 Register scratch3 = r3;
Steve Block6ded16b2010-05-10 14:33:55 +01007169
7170 // Counter will be decremented in the deferred code. Placed here to avoid
7171 // having it in the instruction stream below where patching will occur.
7172 __ IncrementCounter(&Counters::keyed_store_inline, 1,
Leon Clarkef7060e22010-06-03 12:02:55 +01007173 scratch1, scratch2);
7174
Leon Clarkef7060e22010-06-03 12:02:55 +01007177 // Load the value, key and receiver from the stack.
Steve Block8defd9f2010-07-08 12:39:36 +01007178 bool value_is_harmless = frame_->KnownSmiAt(0);
7179 if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
7180 bool key_is_smi = frame_->KnownSmiAt(1);
Leon Clarkef7060e22010-06-03 12:02:55 +01007181 Register value = frame_->PopToRegister();
7182 Register key = frame_->PopToRegister(value);
Steve Block8defd9f2010-07-08 12:39:36 +01007183 VirtualFrame::SpilledScope spilled(frame_);
Leon Clarkef7060e22010-06-03 12:02:55 +01007184 Register receiver = r2;
7185 frame_->EmitPop(receiver);
Steve Block8defd9f2010-07-08 12:39:36 +01007186
7187#ifdef DEBUG
7188 bool we_remembered_the_write_barrier = value_is_harmless;
7189#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01007190
7191 // The deferred code expects value, key and receiver in registers.
7192 DeferredReferenceSetKeyedValue* deferred =
7193 new DeferredReferenceSetKeyedValue(value, key, receiver);
Steve Block6ded16b2010-05-10 14:33:55 +01007194
7195 // Check that the value is a smi. As this inlined code does not set the
7196 // write barrier it is only possible to store smi values.
Steve Block8defd9f2010-07-08 12:39:36 +01007197 if (!value_is_harmless) {
7198 // If the value is not likely to be a Smi then let's test the fixed array
7199 // for new space instead. See below.
7200 if (wb_info == LIKELY_SMI) {
7201 __ tst(value, Operand(kSmiTagMask));
7202 deferred->Branch(ne);
7203#ifdef DEBUG
7204 we_remembered_the_write_barrier = true;
7205#endif
7206 }
7207 }
Steve Block6ded16b2010-05-10 14:33:55 +01007208
Steve Block8defd9f2010-07-08 12:39:36 +01007209 if (!key_is_smi) {
7210 // Check that the key is a smi.
7211 __ tst(key, Operand(kSmiTagMask));
7212 deferred->Branch(ne);
7213 }
Steve Block6ded16b2010-05-10 14:33:55 +01007214
7215 // Check that the receiver is a heap object.
Leon Clarkef7060e22010-06-03 12:02:55 +01007216 __ tst(receiver, Operand(kSmiTagMask));
Steve Block6ded16b2010-05-10 14:33:55 +01007217 deferred->Branch(eq);
7218
7219 // Check that the receiver is a JSArray.
Leon Clarkef7060e22010-06-03 12:02:55 +01007220 __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
Steve Block6ded16b2010-05-10 14:33:55 +01007221 deferred->Branch(ne);
7222
7223 // Check that the key is within bounds. Both the key and the length of
7224 // the JSArray are smis. Use unsigned comparison to handle negative keys.
Leon Clarkef7060e22010-06-03 12:02:55 +01007225 __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
7226 __ cmp(scratch1, key);
Steve Block6ded16b2010-05-10 14:33:55 +01007227 deferred->Branch(ls); // Unsigned less equal.
7228
Steve Block8defd9f2010-07-08 12:39:36 +01007229 // Get the elements array from the receiver.
7230 __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
7231 if (!value_is_harmless && wb_info != LIKELY_SMI) {
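      // A store into an elements array that is itself in new space needs no
      // write barrier, and neither does storing a Smi. The tst below is
      // conditional (ne), so it only executes when the elements array lies
      // outside new space; we take the deferred code only for a non-Smi
      // value stored into an old-space array.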
7233 __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask()));
7234 __ cmp(scratch2, Operand(ExternalReference::new_space_start()));
7235 __ tst(value, Operand(kSmiTagMask), ne);
7236 deferred->Branch(ne);
7237#ifdef DEBUG
7238 we_remembered_the_write_barrier = true;
7239#endif
7240 }
7241 // Check that the elements array is not a dictionary.
7242 __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
Steve Block6ded16b2010-05-10 14:33:55 +01007243 // The following instructions are part of the inlined store keyed
7244 // property code which can be patched. Therefore the exact number of
7245 // instructions generated needs to be fixed, so the constant pool is blocked
7246 // while generating this code.
Steve Block6ded16b2010-05-10 14:33:55 +01007247 { Assembler::BlockConstPoolScope block_const_pool(masm_);
Steve Block8defd9f2010-07-08 12:39:36 +01007248#ifdef DEBUG
7249 Label check_inlined_codesize;
7250 masm_->bind(&check_inlined_codesize);
7251#endif
7252
Steve Block6ded16b2010-05-10 14:33:55 +01007253 // Read the fixed array map from the constant pool (not from the root
7254 // array) so that the value can be patched. When debugging, we patch this
7255 // comparison to always fail so that we will hit the IC call in the
7256 // deferred code which will allow the debugger to break for fast case
7257 // stores.
Leon Clarkef7060e22010-06-03 12:02:55 +01007258 __ mov(scratch3, Operand(Factory::fixed_array_map()));
7259 __ cmp(scratch2, scratch3);
Steve Block6ded16b2010-05-10 14:33:55 +01007260 deferred->Branch(ne);
7261
7262 // Store the value.
Leon Clarkef7060e22010-06-03 12:02:55 +01007263 __ add(scratch1, scratch1,
7264 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
7265 __ str(value,
7266 MemOperand(scratch1, key, LSL,
7267 kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
Steve Block6ded16b2010-05-10 14:33:55 +01007268
7269 // Make sure that the expected number of instructions are generated.
Leon Clarkef7060e22010-06-03 12:02:55 +01007270 ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch,
Steve Block6ded16b2010-05-10 14:33:55 +01007271 masm_->InstructionsGeneratedSince(&check_inlined_codesize));
7272 }
7273
Steve Block8defd9f2010-07-08 12:39:36 +01007274 ASSERT(we_remembered_the_write_barrier);
7275
Steve Block6ded16b2010-05-10 14:33:55 +01007276 deferred->BindExit();
7277 } else {
7278 frame()->CallKeyedStoreIC();
7279 }
Leon Clarked91b9f72010-01-27 17:25:45 +00007280}
7281
7282
Steve Blocka7e24c12009-10-30 11:49:00 +00007283#ifdef DEBUG
7284bool CodeGenerator::HasValidEntryRegisters() { return true; }
7285#endif
7286
7287
7288#undef __
7289#define __ ACCESS_MASM(masm)
7290
Steve Blocka7e24c12009-10-30 11:49:00 +00007291Handle<String> Reference::GetName() {
7292 ASSERT(type_ == NAMED);
7293 Property* property = expression_->AsProperty();
7294 if (property == NULL) {
7295 // Global variable reference treated as a named property reference.
7296 VariableProxy* proxy = expression_->AsVariableProxy();
7297 ASSERT(proxy->AsVariable() != NULL);
7298 ASSERT(proxy->AsVariable()->is_global());
7299 return proxy->name();
7300 } else {
7301 Literal* raw_name = property->key()->AsLiteral();
7302 ASSERT(raw_name != NULL);
7303 return Handle<String>(String::cast(*raw_name->handle()));
7304 }
7305}
7306
7307
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007308void Reference::DupIfPersist() {
7309 if (persist_after_get_) {
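    // Keep the reference components on the frame for later use: receiver
    // and key for KEYED, just the receiver for NAMED.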
7310 switch (type_) {
7311 case KEYED:
7312 cgen_->frame()->Dup2();
7313 break;
7314 case NAMED:
7315 cgen_->frame()->Dup();
7316 // Fall through.
7317 case UNLOADED:
7318 case ILLEGAL:
7319 case SLOT:
7320 // Do nothing.
7321 break;
7322 }
7323 } else {
7324 set_unloaded();
7325 }
7326}
7327
7328
Steve Blockd0582a62009-12-15 09:54:21 +00007329void Reference::GetValue() {
Steve Blocka7e24c12009-10-30 11:49:00 +00007330 ASSERT(cgen_->HasValidEntryRegisters());
7331 ASSERT(!is_illegal());
7332 ASSERT(!cgen_->has_cc());
7333 MacroAssembler* masm = cgen_->masm();
7334 Property* property = expression_->AsProperty();
7335 if (property != NULL) {
7336 cgen_->CodeForSourcePosition(property->position());
7337 }
7338
7339 switch (type_) {
7340 case SLOT: {
7341 Comment cmnt(masm, "[ Load from Slot");
Kristian Monsen0d5e1162010-09-30 15:31:59 +01007342 Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00007343 ASSERT(slot != NULL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007344 DupIfPersist();
Steve Block6ded16b2010-05-10 14:33:55 +01007345 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
Steve Blocka7e24c12009-10-30 11:49:00 +00007346 break;
7347 }
7348
7349 case NAMED: {
Steve Blocka7e24c12009-10-30 11:49:00 +00007350 Variable* var = expression_->AsVariableProxy()->AsVariable();
Steve Block6ded16b2010-05-10 14:33:55 +01007351 bool is_global = var != NULL;
7352 ASSERT(!is_global || var->is_global());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007353 Handle<String> name = GetName();
7354 DupIfPersist();
7355 cgen_->EmitNamedLoad(name, is_global);
Steve Blocka7e24c12009-10-30 11:49:00 +00007356 break;
7357 }
7358
7359 case KEYED: {
Leon Clarkef7060e22010-06-03 12:02:55 +01007360 ASSERT(property != NULL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01007361 DupIfPersist();
Steve Block6ded16b2010-05-10 14:33:55 +01007362 cgen_->EmitKeyedLoad();
Leon Clarked91b9f72010-01-27 17:25:45 +00007363 cgen_->frame()->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00007364 break;
7365 }
7366
7367 default:
7368 UNREACHABLE();
7369 }
7370}
7371
7372
Steve Block8defd9f2010-07-08 12:39:36 +01007373void Reference::SetValue(InitState init_state, WriteBarrierCharacter wb_info) {
Steve Blocka7e24c12009-10-30 11:49:00 +00007374 ASSERT(!is_illegal());
7375 ASSERT(!cgen_->has_cc());
7376 MacroAssembler* masm = cgen_->masm();
7377 VirtualFrame* frame = cgen_->frame();
7378 Property* property = expression_->AsProperty();
7379 if (property != NULL) {
7380 cgen_->CodeForSourcePosition(property->position());
7381 }
7382
7383 switch (type_) {
7384 case SLOT: {
7385 Comment cmnt(masm, "[ Store to Slot");
Kristian Monsen0d5e1162010-09-30 15:31:59 +01007386 Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
Leon Clarkee46be812010-01-19 14:06:41 +00007387 cgen_->StoreToSlot(slot, init_state);
Steve Block6ded16b2010-05-10 14:33:55 +01007388 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007389 break;
7390 }
7391
7392 case NAMED: {
7393 Comment cmnt(masm, "[ Store to named Property");
Steve Block6ded16b2010-05-10 14:33:55 +01007394 cgen_->EmitNamedStore(GetName(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00007395 frame->EmitPush(r0);
Andrei Popescu402d9372010-02-26 13:31:12 +00007396 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007397 break;
7398 }
7399
7400 case KEYED: {
7401 Comment cmnt(masm, "[ Store to keyed Property");
7402 Property* property = expression_->AsProperty();
7403 ASSERT(property != NULL);
7404 cgen_->CodeForSourcePosition(property->position());
Steve Block8defd9f2010-07-08 12:39:36 +01007405 cgen_->EmitKeyedStore(property->key()->type(), wb_info);
Steve Blocka7e24c12009-10-30 11:49:00 +00007406 frame->EmitPush(r0);
Leon Clarkef7060e22010-06-03 12:02:55 +01007407 set_unloaded();
Steve Blocka7e24c12009-10-30 11:49:00 +00007408 break;
7409 }
7410
7411 default:
7412 UNREACHABLE();
7413 }
7414}
7415
7416
Leon Clarkee46be812010-01-19 14:06:41 +00007417const char* GenericBinaryOpStub::GetName() {
7418 if (name_ != NULL) return name_;
7419 const int len = 100;
7420 name_ = Bootstrapper::AllocateAutoDeletedArray(len);
7421 if (name_ == NULL) return "OOM";
7422 const char* op_name = Token::Name(op_);
7423 const char* overwrite_name;
7424 switch (mode_) {
7425 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
7426 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
7427 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
7428 default: overwrite_name = "UnknownOverwrite"; break;
7429 }
7430
7431 OS::SNPrintF(Vector<char>(name_, len),
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007432 "GenericBinaryOpStub_%s_%s%s_%s",
Leon Clarkee46be812010-01-19 14:06:41 +00007433 op_name,
7434 overwrite_name,
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01007435 specialized_on_rhs_ ? "_ConstantRhs" : "",
7436 BinaryOpIC::GetName(runtime_operands_type_));
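  // Produces names like "GenericBinaryOpStub_ADD_OverwriteLeft_Generic"
  // (an illustrative example; the last component comes from
  // BinaryOpIC::GetName).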
Leon Clarkee46be812010-01-19 14:06:41 +00007437 return name_;
7438}
7439
7440
Steve Blocka7e24c12009-10-30 11:49:00 +00007441#undef __
7442
7443} } // namespace v8::internal
Leon Clarkef7060e22010-06-03 12:02:55 +01007444
7445#endif // V8_TARGET_ARCH_ARM