// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "jump-target-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "stub-cache.h"
#include "virtual-frame-inl.h"
#include "virtual-frame-arm-inl.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm_)

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  // On ARM you either have a completely spilled frame or you
  // handle it yourself, but at the moment there's no automation
  // of registers and deferred code.
}


void DeferredCode::RestoreRegisters() {
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->frame()->AssertIsSpilled();
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
}


void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      previous_(owner->state()) {
  owner->set_state(this);
}


ConditionCodeGenState::ConditionCodeGenState(CodeGenerator* owner,
                                             JumpTarget* true_target,
                                             JumpTarget* false_target)
    : CodeGenState(owner),
      true_target_(true_target),
      false_target_(false_target) {
  owner->set_state(this);
}


TypeInfoCodeGenState::TypeInfoCodeGenState(CodeGenerator* owner,
                                           Slot* slot,
                                           TypeInfo type_info)
    : CodeGenState(owner),
      slot_(slot) {
  owner->set_state(this);
  old_type_info_ = owner->set_type_info(slot, type_info);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


TypeInfoCodeGenState::~TypeInfoCodeGenState() {
  owner()->set_type_info(slot_, old_type_info_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation.

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(al),
      state_(NULL),
      loop_nesting_(0),
      type_info_(NULL),
      function_return_(JumpTarget::BIDIRECTIONAL),
      function_return_is_shadowed_(false) {
}


// Calling conventions:
// fp: caller's frame pointer
// sp: stack pointer
// r1: called JS function
// cp: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;

  int slots = scope()->num_parameters() + scope()->num_stack_slots();
  ScopedVector<TypeInfo> type_info_array(slots);
  for (int i = 0; i < slots; i++) {
    type_info_array[i] = TypeInfo::Unknown();
  }
  type_info_ = &type_info_array;

  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  cc_reg_ = al;

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments
    // lr: return address
    // fp: caller's frame pointer
    // sp: stack pointer
    // r1: called JS function
    // cp: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ stop("stop-at");
    }
#endif

    frame_->Enter();
    // tos: code slot

    // Allocate space for locals and initialize them.  This also checks
    // for stack overflow.
    frame_->AllocateStackSlots();

    frame_->AssertIsSpilled();
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      // Allocate local context.
      // Get outer context and create a new context based on it.
      __ ldr(r0, frame_->Function());
      frame_->EmitPush(r0);
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        frame_->CallStub(&stub, 1);
      } else {
        frame_->CallRuntime(Runtime::kNewContext, 1);
      }

#ifdef DEBUG
      JumpTarget verified_true;
      __ cmp(r0, cp);
      verified_true.Branch(eq);
      __ stop("NewContext: r0 is expected to be the same as cp");
      verified_true.Bind();
#endif
      // Update context local.
      __ str(cp, frame_->Context());
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
      frame_->AssertIsSpilled();
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          ASSERT(!scope()->is_global_scope());  // No params in global scope.
          __ ldr(r1, frame_->ParameterAt(i));
          // Loads r2 with context; used below in RecordWrite.
          __ str(r1, SlotOperand(slot, r2));
          // Load the offset into r3.
          int slot_offset =
              FixedArray::kHeaderSize + slot->index() * kPointerSize;
          __ RecordWrite(r2, Operand(slot_offset), r3, r1);
        }
      }
    }

    // Store the arguments object.  This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.SetExpectedHeight();
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());
    }
  }

  // Handle the return from the function.
  if (has_valid_frame()) {
    // If there is a valid frame, control flow can fall off the end of
    // the body.  In that case there is an implicit return statement.
    ASSERT(!function_return_is_shadowed_);
    frame_->PrepareForReturn();
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    if (function_return_.is_bound()) {
      function_return_.Jump();
    } else {
      function_return_.Bind();
      GenerateReturnSequence();
    }
  } else if (function_return_.is_linked()) {
    // If the return target has dangling jumps to it, then we have not
    // yet generated the return sequence.  This can happen when (a)
    // control does not flow off the end of the body so we did not
    // compile an artificial return statement just above, and (b) there
    // are return statements in the body but (c) they are all shadowed.
    function_return_.Bind();
    GenerateReturnSequence();
  }

  // Adjust for function-level loop nesting.
  ASSERT(loop_nesting_ == (info->is_in_loop() ? 1 : 0));
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(!has_cc());
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    ProcessDeferred();
  }

  allocator_ = NULL;
  type_info_ = NULL;
}


int CodeGenerator::NumberOfSlot(Slot* slot) {
  if (slot == NULL) return kInvalidSlotNumber;
  switch (slot->type()) {
    case Slot::PARAMETER:
      return slot->index();
    case Slot::LOCAL:
      return slot->index() + scope()->num_parameters();
    default:
      break;
  }
  return kInvalidSlotNumber;
}


MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference be-
      // cause the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}


MemOperand CodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Register tmp,
    Register tmp2,
    JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
  }
  // Check that last extension is NULL.
  __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(tmp2, tmp2);
  slow->Branch(ne);
  __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp, slot->index());
}
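
// A note on the extension checks above: a context's EXTENSION_INDEX slot is
// non-NULL when the scope chain may hold dynamically introduced bindings
// (for example from an eval), in which case a fixed-offset context slot
// access is not safe.  The fast path is therefore only taken when every
// extension along the chain is NULL; otherwise we branch to 'slow'.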


// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { ConditionCodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression.  In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::Load(Expression* expr) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator.  A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  frame_->AssertIsNotSpilled();
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  JumpTarget true_target;
  JumpTarget false_target;
  LoadCondition(expr, &true_target, &false_target, false);

  if (has_cc()) {
    // Convert cc_reg_ into a boolean value.
    JumpTarget loaded;
    JumpTarget materialize_true;
    materialize_true.Branch(cc_reg_);
    frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    loaded.Jump();
    materialize_true.Bind();
    frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    loaded.Bind();
    cc_reg_ = al;
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    // We have at least one condition value that has been "translated"
    // into a branch, thus it needs to be loaded explicitly.
    JumpTarget loaded;
    if (frame_ != NULL) {
      loaded.Jump();  // Don't lose the current TOS.
    }
    bool both = true_target.is_linked() && false_target.is_linked();
    // Load "true" if necessary.
    if (true_target.is_linked()) {
      true_target.Bind();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }
    // If both "true" and "false" need to be loaded jump across the code for
    // "false".
    if (both) {
      loaded.Jump();
    }
    // Load "false" if necessary.
    if (false_target.is_linked()) {
      false_target.Bind();
      frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    }
    // A value is loaded on all paths reaching this point.
    loaded.Bind();
  }
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadGlobal() {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, GlobalObjectOperand());
  frame_->EmitPush(reg);
}


void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(reg,
         FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(reg);
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;

  // In strict mode there is no need for shadow arguments.
  ASSERT(scope()->arguments_shadow() != NULL || scope()->is_strict_mode());
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0 || scope()->is_strict_mode())
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


void CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->EmitPushRoot(Heap::kArgumentsMarkerRootIndex);
  } else {
    frame_->SpillAll();
    ArgumentsAccessStub stub(is_strict_mode()
        ? ArgumentsAccessStub::NEW_STRICT
        : ArgumentsAccessStub::NEW_NON_STRICT);
    __ ldr(r2, frame_->Function());
    // The receiver is below the arguments, the return address, and the
    // frame pointer on the stack.
    const int kReceiverDisplacement = 2 + scope()->num_parameters();
    __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
    __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
    frame_->Adjust(3);
    __ Push(r2, r1, r0);
    frame_->CallStub(&stub, 3);
    frame_->EmitPush(r0);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT((shadow != NULL && shadow->AsSlot() != NULL) ||
         scope()->is_strict_mode());

  JumpTarget done;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
    Register arguments = frame_->PopToRegister();
    __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
    __ cmp(arguments, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  if (shadow != NULL) {
    StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
  }
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator.  A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  cgen->frame()->AssertIsNotSpilled();
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property.  Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}


void CodeGenerator::UnloadReference(Reference* ref) {
  int size = ref->size();
  ref->set_unloaded();
  if (size == 0) return;

  // Pop a reference from the stack while preserving TOS.
  VirtualFrame::RegisterAllocationScope scope(this);
  Comment cmnt(masm_, "[ UnloadReference");
  if (size > 0) {
    Register tos = frame_->PopToRegister();
    frame_->Drop(size);
    frame_->EmitPush(tos);
  }
}


// ECMA-262, section 9.2, page 30: ToBoolean().  Convert the given
// register to a boolean in the condition code register.  The code
// may jump to 'false_target' in case the register converts to 'false'.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  bool known_smi = frame_->KnownSmiAt(0);
  Register tos = frame_->PopToRegister();

  // Fast case checks

  // Check if the value is 'false'.
  if (!known_smi) {
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);

    // Check if the value is 'true'.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(tos, ip);
    true_target->Branch(eq);

    // Check if the value is 'undefined'.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);
  }

  // Check if the value is a smi.
  __ cmp(tos, Operand(Smi::FromInt(0)));

  if (!known_smi) {
    false_target->Branch(eq);
    __ tst(tos, Operand(kSmiTagMask));
    true_target->Branch(eq);

    // Slow case.
    if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Implements the slow case by using ToBooleanStub.
      // The ToBooleanStub takes a single argument, and
      // returns a non-zero value for true, or zero for false.
      // Both the argument value and the return value use the
      // register assigned to tos.
      ToBooleanStub stub(tos);
      frame_->CallStub(&stub, 0);
      // Convert the result in "tos" to a condition code.
      __ cmp(tos, Operand(0, RelocInfo::NONE));
    } else {
      // Implements slow case by calling the runtime.
      frame_->EmitPush(tos);
      frame_->CallRuntime(Runtime::kToBool, 1);
      // Convert the result (r0) to a condition code.
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(r0, ip);
    }
  }

  cc_reg_ = ne;
}
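
// The smi fast path above relies on the tagging scheme: with kSmiTag == 0 and
// a one-bit tag (as on 32-bit ARM), a small integer v is represented as the
// word (v << 1), so tst(tos, kSmiTagMask) sets 'eq' exactly for smis.  E.g.
// the smi 3 is the word 6: the cmp against Smi::FromInt(0) is 'ne' (truthy)
// and the tag test is 'eq', so control reaches true_target without entering
// the slow case.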


void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           OverwriteMode overwrite_mode,
                                           GenerateInlineSmi inline_smi,
                                           int constant_rhs) {
  // top of virtual frame: y
  // 2nd elt. on virtual frame : x
  // result : top of virtual frame

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:
    case Token::SUB:
      if (inline_smi) {
        JumpTarget done;
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register scratch = VirtualFrame::scratch0();
        __ orr(scratch, rhs, Operand(lhs));
        // Check they are both small and positive.
        __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        STATIC_ASSERT(kSmiTag == 0);
        if (op == Token::ADD) {
          __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
        } else {
          __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
        }
        done.Branch(eq);
        GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
        frame_->SpillAll();
        frame_->CallStub(&stub, 0);
        done.Bind();
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
      if (inline_smi) {
        bool rhs_is_smi = frame_->KnownSmiAt(0);
        bool lhs_is_smi = frame_->KnownSmiAt(1);
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register smi_test_reg;
        Condition cond;
        if (!rhs_is_smi || !lhs_is_smi) {
          if (rhs_is_smi) {
            smi_test_reg = lhs;
          } else if (lhs_is_smi) {
            smi_test_reg = rhs;
          } else {
            smi_test_reg = VirtualFrame::scratch0();
            __ orr(smi_test_reg, rhs, Operand(lhs));
          }
          // Check they are both Smis.
          __ tst(smi_test_reg, Operand(kSmiTagMask));
          cond = eq;
        } else {
          cond = al;
        }
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        if (op == Token::BIT_OR) {
          __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else if (op == Token::BIT_AND) {
          __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else {
          ASSERT(op == Token::BIT_XOR);
          STATIC_ASSERT(kSmiTag == 0);
          __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
        }
        if (cond != al) {
          JumpTarget done;
          done.Branch(cond);
          GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
          frame_->SpillAll();
          frame_->CallStub(&stub, 0);
          done.Bind();
        }
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      Register rhs = frame_->PopToRegister();
      Register lhs = frame_->PopToRegister(rhs);  // Don't pop to rhs register.
      GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
      frame_->SpillAll();
      frame_->CallStub(&stub, 0);
      frame_->EmitPush(r0);
      break;
    }

    case Token::COMMA: {
      Register scratch = frame_->PopToRegister();
      // Simply discard left value.
      frame_->Drop();
      frame_->EmitPush(scratch);
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}
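
// A note on the inline ADD/SUB fast case above: tst(lhs | rhs,
// kSmiTagMask | 0xc0000000) is 'eq' only when both operands are smis with
// the top two bits clear, i.e. tagged words in [0, 2^30).  Such words encode
// values in [0, 2^29), so their sum or difference always fits in the 31-bit
// smi range and the conditionally executed add/sub cannot overflow.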


class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             int value,
                             bool reversed,
                             OverwriteMode overwrite_mode,
                             Register tos)
      : op_(op),
        value_(value),
        reversed_(reversed),
        overwrite_mode_(overwrite_mode),
        tos_register_(tos) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();
  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit(). Currently on ARM SaveRegisters() and RestoreRegisters() are empty
  // methods, it is the responsibility of the deferred code to save and restore
  // registers.
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToNonSmiInput(Condition cond);
  void JumpToAnswerOutOfRange(Condition cond);

 private:
  void GenerateNonSmiInput();
  void GenerateAnswerOutOfRange();
  void WriteNonSmiAnswer(Register answer,
                         Register heap_number,
                         Register scratch);

  Token::Value op_;
  int value_;
  bool reversed_;
  OverwriteMode overwrite_mode_;
  Register tos_register_;
  Label non_smi_input_;
  Label answer_out_of_range_;
};


// For bit operations we try harder and handle the case where the input is not
// a Smi but a 32-bit integer without calling the generic stub.
void DeferredInlineSmiOperation::JumpToNonSmiInput(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  __ b(cond, &non_smi_input_);
}


// For bit operations the result is always 32 bits wide, so we handle the case
// where the result does not fit in a Smi without calling the generic stub.
void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  if ((op_ == Token::SHR) &&
      !Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
    // >>> requires an unsigned to double conversion and the non VFP code
    // does not support this conversion.
    __ b(cond, entry_label());
  } else {
    __ b(cond, &answer_out_of_range_);
  }
}

// On entry the non-constant side of the binary operation is in tos_register_
// and the constant smi side is nowhere.  The tos_register_ is not used by the
// virtual frame.  On exit the answer is in the tos_register_ and the virtual
// frame is unchanged.
void DeferredInlineSmiOperation::Generate() {
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register lhs = r1;
  Register rhs = r0;
  switch (op_) {
    case Token::ADD: {
      // Revert optimistic add.
      if (reversed_) {
        __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      // Revert optimistic sub.
      if (reversed_) {
        __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    // For these operations there is no optimistic operation that needs to be
    // reverted.
    case Token::MUL:
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (tos_register_.is(r1)) {
        __ mov(r0, Operand(Smi::FromInt(value_)));
      } else {
        ASSERT(tos_register_.is(r0));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      }
      if (reversed_ == tos_register_.is(r1)) {
        lhs = r0;
        rhs = r1;
      }
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
  __ CallStub(&stub);

  // The generic stub returns its value in r0, but that's not
  // necessarily what we want.  We want whatever the inlined code
  // expected, which is that the answer is in the same register as
  // the operand was.
  __ Move(tos_register_, r0);

  // The tos register was not in use for the virtual frame that we
  // came into this function with, so we can merge back to that frame
  // without trashing it.
  copied_frame.MergeTo(frame_state()->frame());

  Exit();

  if (non_smi_input_.is_linked()) {
    GenerateNonSmiInput();
  }

  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}
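
// Reverting the "optimistic" operations above works at the tagged-word
// level: the inline code already did, say, tos += Smi::FromInt(value_)
// before discovering a non-smi operand or an overflow, so subtracting the
// same tagged constant recovers the original operand exactly, even when the
// add wrapped around.  After the revert both operands are in r0/r1 in the
// order the generic stub expects.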


// Convert and write the integer answer into heap_number.
void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                   Register heap_number,
                                                   Register scratch) {
  if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, answer);
    if (op_ == Token::SHR) {
      __ vcvt_f64_u32(d0, s0);
    } else {
      __ vcvt_f64_s32(d0, s0);
    }
    __ sub(scratch, heap_number, Operand(kHeapObjectTag));
    __ vstr(d0, scratch, HeapNumber::kValueOffset);
  } else {
    WriteInt32ToHeapNumberStub stub(answer, heap_number, scratch);
    __ CallStub(&stub);
  }
}
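
// In the VFP3 path above, vcvt_f64_s32/vcvt_f64_u32 converts the integer in
// s0 to a double in d0 (unsigned for SHR, whose result must be treated as
// non-negative), and the kHeapObjectTag subtraction yields an untagged
// address so that vstr can store the 8-byte IEEE value directly into the
// heap number's payload at HeapNumber::kValueOffset.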


void DeferredInlineSmiOperation::GenerateNonSmiInput() {
  // We know the left hand side is not a Smi and the right hand side is an
  // immediate value (value_) which can be represented as a Smi.  We only
  // handle bit operations.
  ASSERT(Token::IsBitOp(op_));

  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&non_smi_input_);
  if (FLAG_debug_code) {
    __ AbortIfSmi(tos_register_);
  }

  // This routine uses the registers from r2 to r6.  At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  Register heap_number_map = r7;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ ldr(r3, FieldMemOperand(tos_register_, HeapNumber::kMapOffset));
  __ cmp(r3, heap_number_map);
  // Not a number, fall back to the GenericBinaryOpStub.
  __ b(ne, entry_label());

  Register int32 = r2;
  // Not a 32-bit signed int, fall back to the GenericBinaryOpStub.
  __ ConvertToInt32(tos_register_, int32, r4, r5, d0, entry_label());

  // tos_register_ (r0 or r1): Original heap number.
  // int32: signed 32-bit int.

  Label result_not_a_smi;
  int shift_value = value_ & 0x1f;
  switch (op_) {
    case Token::BIT_OR:  __ orr(int32, int32, Operand(value_)); break;
    case Token::BIT_XOR: __ eor(int32, int32, Operand(value_)); break;
    case Token::BIT_AND: __ and_(int32, int32, Operand(value_)); break;
    case Token::SAR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, ASR, shift_value));
      }
      break;
    case Token::SHR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSR, shift_value), SetCC);
      } else {
        // SHR is special because it is required to produce a positive answer.
        __ cmp(int32, Operand(0, RelocInfo::NONE));
      }
      if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
        __ b(mi, &result_not_a_smi);
      } else {
        // Non VFP code cannot convert from unsigned to double, so fall back
        // to GenericBinaryOpStub.
        __ b(mi, entry_label());
      }
      break;
    case Token::SHL:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSL, shift_value));
      }
      break;
    default: UNREACHABLE();
  }
  // Check that the *signed* result fits in a smi.  Not necessary for AND, SAR
  // if the shift is more than 0 or SHR if the shift is more than 1.
  if (!((op_ == Token::BIT_AND && value_ >= 0) ||
        ((op_ == Token::SAR) && (shift_value > 0)) ||
        ((op_ == Token::SHR) && (shift_value > 1)))) {
    __ add(r3, int32, Operand(0x40000000), SetCC);
    __ b(mi, &result_not_a_smi);
  }
  __ mov(tos_register_, Operand(int32, LSL, kSmiTagSize));
  Exit();

  if (result_not_a_smi.is_linked()) {
    __ bind(&result_not_a_smi);
    if (overwrite_mode_ != OVERWRITE_LEFT) {
      ASSERT((overwrite_mode_ == NO_OVERWRITE) ||
             (overwrite_mode_ == OVERWRITE_RIGHT));
      // If the allocation fails, fall back to the GenericBinaryOpStub.
      __ AllocateHeapNumber(r4, r5, r6, heap_number_map, entry_label());
      // Nothing can go wrong now, so overwrite tos.
      __ mov(tos_register_, Operand(r4));
    }

    // int32: answer as signed 32-bit integer.
    // tos_register_: Heap number to write the answer into.
    WriteNonSmiAnswer(int32, tos_register_, r3);

    Exit();
  }
}


1192void DeferredInlineSmiOperation::GenerateAnswerOutOfRange() {
1193 // The input from a bitwise operation were Smis but the result cannot fit
John Reck59135872010-11-02 12:39:01 -07001194 // into a Smi, so we store it into a heap number. VirtualFrame::scratch0()
1195 // holds the untagged result to be converted. tos_register_ contains the
1196 // input. See the calls to JumpToAnswerOutOfRange to see how we got here.
Iain Merrick9ac36c92010-09-13 15:29:50 +01001197 ASSERT(Token::IsBitOp(op_));
1198 ASSERT(!reversed_);
1199
John Reck59135872010-11-02 12:39:01 -07001200 Register untagged_result = VirtualFrame::scratch0();
1201
Iain Merrick9ac36c92010-09-13 15:29:50 +01001202 if (FLAG_debug_code) {
1203 __ Abort("Should not fall through!");
1204 }
1205
1206 __ bind(&answer_out_of_range_);
1207 if (((value_ & 0x1f) == 0) && (op_ == Token::SHR)) {
John Reck59135872010-11-02 12:39:01 -07001208 // >>> 0 is a special case where the untagged_result register is not set up
1209 // yet. We untag the input to get it.
1210 __ mov(untagged_result, Operand(tos_register_, ASR, kSmiTagSize));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001211 }
1212
1213 // This routine uses the registers from r2 to r6. At the moment they are
1214 // not used by the register allocator, but when they are it should use
1215 // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.
1216
1217 // Allocate the result heap number.
John Reck59135872010-11-02 12:39:01 -07001218 Register heap_number_map = VirtualFrame::scratch1();
Iain Merrick9ac36c92010-09-13 15:29:50 +01001219 Register heap_number = r4;
1220 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
1221 // If the allocation fails, fall back to the GenericBinaryOpStub.
1222 __ AllocateHeapNumber(heap_number, r5, r6, heap_number_map, entry_label());
John Reck59135872010-11-02 12:39:01 -07001223 WriteNonSmiAnswer(untagged_result, heap_number, r3);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001224 __ mov(tos_register_, Operand(heap_number));
1225
1226 Exit();
Steve Blocka7e24c12009-10-30 11:49:00 +00001227}


static bool PopCountLessThanEqual2(unsigned int x) {
  x &= x - 1;
  return (x & (x - 1)) == 0;
}
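
// A note on the test above: x &= x - 1 clears the lowest set bit (Kernighan's
// trick), so after one clearing step a value with at most two bits set has at
// most one bit left, which is exactly what (x & (x - 1)) == 0 tests.  E.g.
// x == 10 (0b1010) becomes 8 (0b1000), and 8 & 7 == 0, so the answer is true.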


// Returns the index of the lowest bit set.
static int BitPosition(unsigned x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}
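
// BitPosition scans a nibble at a time and then a bit at a time, so it
// assumes x != 0 (it would loop forever on zero).  E.g. BitPosition(24):
// 24 is 0b11000, three zero bits are shifted out, and the result is 3.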


// Can we multiply by x with max two shifts and an add.
// This answers yes to all integers from 2 to 10.
static bool IsEasyToMultiplyBy(int x) {
  if (x < 2) return false;  // Avoid special cases.
  if (x > (Smi::kMaxValue + 1) >> 2) return false;  // Almost always overflows.
  if (IsPowerOf2(x)) return true;  // Simple shift.
  if (PopCountLessThanEqual2(x)) return true;  // Shift and add and shift.
  if (IsPowerOf2(x + 1)) return true;  // Patterns like 11111.
  return false;
}
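
// For instance, every integer from 2 to 10 qualifies: 2, 4 and 8 are powers
// of two (a single shift), 3, 5, 6, 9 and 10 have at most two bits set
// (a shift-and-add, possibly followed by another shift), and 7 is one less
// than a power of two (a shift and a reverse-subtract).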


// Can multiply by anything that IsEasyToMultiplyBy returns true for.
// Source and destination may be the same register.  This routine does
// not set carry and overflow the way a mul instruction would.
static void InlineMultiplyByKnownInt(MacroAssembler* masm,
                                     Register source,
                                     Register destination,
                                     int known_int) {
  if (IsPowerOf2(known_int)) {
    masm->mov(destination, Operand(source, LSL, BitPosition(known_int)));
  } else if (PopCountLessThanEqual2(known_int)) {
    int first_bit = BitPosition(known_int);
    int second_bit = BitPosition(known_int ^ (1 << first_bit));
    masm->add(destination, source,
              Operand(source, LSL, second_bit - first_bit));
    if (first_bit != 0) {
      masm->mov(destination, Operand(destination, LSL, first_bit));
    }
  } else {
    ASSERT(IsPowerOf2(known_int + 1));  // Patterns like 1111.
    int the_bit = BitPosition(known_int + 1);
    masm->rsb(destination, source, Operand(source, LSL, the_bit));
  }
}
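
// Worked example: for known_int == 10 (0b1010), first_bit is 1 and second_bit
// is 3, so 'add destination, source, source LSL 2' computes source * 5 and
// the final 'mov destination, destination LSL 1' yields source * 10.  For
// known_int == 7, 'rsb destination, source, source LSL 3' computes
// 8 * source - source = 7 * source.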


void CodeGenerator::SmiOperation(Token::Value op,
                                 Handle<Object> value,
                                 bool reversed,
                                 OverwriteMode mode) {
  int int_value = Smi::cast(*value)->value();

  bool both_sides_are_smi = frame_->KnownSmiAt(0);

  bool something_to_inline;
  switch (op) {
    case Token::ADD:
    case Token::SUB:
    case Token::BIT_AND:
    case Token::BIT_OR:
    case Token::BIT_XOR: {
      something_to_inline = true;
      break;
    }
    case Token::SHL: {
      something_to_inline = (both_sides_are_smi || !reversed);
      break;
    }
    case Token::SHR:
    case Token::SAR: {
      if (reversed) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MOD: {
      if (reversed || int_value < 2 || !IsPowerOf2(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    case Token::MUL: {
      if (!IsEasyToMultiplyBy(int_value)) {
        something_to_inline = false;
      } else {
        something_to_inline = true;
      }
      break;
    }
    default: {
      something_to_inline = false;
      break;
    }
  }

  if (!something_to_inline) {
    if (!reversed) {
      // Push the rhs onto the virtual frame by putting it in a TOS register.
      Register rhs = frame_->GetTOSRegister();
      __ mov(rhs, Operand(value));
      frame_->EmitPush(rhs, TypeInfo::Smi());
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, int_value);
    } else {
      // Pop the rhs, then push lhs and rhs in the right order.  Only performs
      // at most one pop, the rest takes place in TOS registers.
      Register lhs = frame_->GetTOSRegister();  // Get reg for pushing.
      Register rhs = frame_->PopToRegister(lhs);  // Don't use lhs for this.
      __ mov(lhs, Operand(value));
      frame_->EmitPush(lhs, TypeInfo::Smi());
      TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
      frame_->EmitPush(rhs, t);
      GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI,
                             GenericBinaryOpStub::kUnknownIntValue);
    }
    return;
  }

1363 // We move the top of stack to a register (normally no move is invoved).
1364 Register tos = frame_->PopToRegister();
  switch (op) {
    case Token::ADD: {
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);

      __ add(tos, tos, Operand(value), SetCC);
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    case Token::SUB: {
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);

      if (reversed) {
        __ rsb(tos, tos, Operand(value), SetCC);
      } else {
        __ sub(tos, tos, Operand(value), SetCC);
      }
      deferred->Branch(vs);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }


    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND: {
      if (both_sides_are_smi) {
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        frame_->EmitPush(tos, TypeInfo::Smi());
      } else {
        DeferredInlineSmiOperation* deferred =
            new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
        __ tst(tos, Operand(kSmiTagMask));
        deferred->JumpToNonSmiInput(ne);
        switch (op) {
          case Token::BIT_OR:  __ orr(tos, tos, Operand(value)); break;
          case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
          case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
          default: UNREACHABLE();
        }
        deferred->BindExit();
        TypeInfo result_type = TypeInfo::Integer32();
        if (op == Token::BIT_AND && int_value >= 0) {
          result_type = TypeInfo::Smi();
        }
        frame_->EmitPush(tos, result_type);
      }
      break;
    }

    case Token::SHL:
      if (reversed) {
        ASSERT(both_sides_are_smi);
        int max_shift = 0;
        int max_result = int_value == 0 ? 1 : int_value;
        while (Smi::IsValid(max_result << 1)) {
          max_shift++;
          max_result <<= 1;
        }
        DeferredCode* deferred =
            new DeferredInlineSmiOperation(op, int_value, true, mode, tos);
        // Mask off the last 5 bits of the shift operand (rhs).  This is part
        // of the definition of shift in JS and we know we have a Smi so we
        // can safely do this.  The masked version gets passed to the
        // deferred code, but that makes no difference.
        __ and_(tos, tos, Operand(Smi::FromInt(0x1f)));
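        // (Tagged arithmetic: Smi::FromInt(0x1f) is 0x1f << kSmiTagSize, so
        // this AND masks the untagged shift count to five bits while
        // leaving the tag bit clear.)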
        __ cmp(tos, Operand(Smi::FromInt(max_shift)));
        deferred->Branch(ge);
        Register scratch = VirtualFrame::scratch0();
        __ mov(scratch, Operand(tos, ASR, kSmiTagSize));  // Untag.
        __ mov(tos, Operand(Smi::FromInt(int_value)));    // Load constant.
        __ mov(tos, Operand(tos, LSL, scratch));          // Shift constant.
        deferred->BindExit();
        TypeInfo result = TypeInfo::Integer32();
        frame_->EmitPush(tos, result);
        break;
      }
      // Fall through!
    case Token::SHR:
    case Token::SAR: {
      ASSERT(!reversed);
      int shift_value = int_value & 0x1f;
      TypeInfo result = TypeInfo::Number();

      if (op == Token::SHR) {
        if (shift_value > 1) {
          result = TypeInfo::Smi();
        } else if (shift_value > 0) {
          result = TypeInfo::Integer32();
        }
      } else if (op == Token::SAR) {
        if (shift_value > 0) {
          result = TypeInfo::Smi();
        } else {
          result = TypeInfo::Integer32();
        }
      } else {
        ASSERT(op == Token::SHL);
        result = TypeInfo::Integer32();
      }

      DeferredInlineSmiOperation* deferred =
          new DeferredInlineSmiOperation(op, shift_value, false, mode, tos);
      if (!both_sides_are_smi) {
        __ tst(tos, Operand(kSmiTagMask));
        deferred->JumpToNonSmiInput(ne);
      }
      switch (op) {
        case Token::SHL: {
          if (shift_value != 0) {
            Register untagged_result = VirtualFrame::scratch0();
            Register scratch = VirtualFrame::scratch1();
            int adjusted_shift = shift_value - kSmiTagSize;
            ASSERT(adjusted_shift >= 0);

            if (adjusted_shift != 0) {
              __ mov(untagged_result, Operand(tos, LSL, adjusted_shift));
            } else {
              __ mov(untagged_result, Operand(tos));
            }
            // Check that the *signed* result fits in a smi.
            __ add(scratch, untagged_result, Operand(0x40000000), SetCC);
            deferred->JumpToAnswerOutOfRange(mi);
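            // (Adding 0x40000000 sets the sign bit exactly when the value
            // lies outside the 31-bit smi range [-0x40000000, 0x3fffffff]:
            // e.g. 0x40000000 + 0x40000000 == 0x80000000 takes the mi
            // branch, while 0x3fffffff + 0x40000000 == 0x7fffffff does not.)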
            __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
          }
          break;
        }
        case Token::SHR: {
          if (shift_value != 0) {
            Register untagged_result = VirtualFrame::scratch0();
            // Remove tag.
            __ mov(untagged_result, Operand(tos, ASR, kSmiTagSize));
            __ mov(untagged_result, Operand(untagged_result, LSR, shift_value));
            if (shift_value == 1) {
              // Check that the *unsigned* result fits in a smi.
              // Neither of the two high-order bits can be set:
              // - 0x80000000: high bit would be lost when smi tagging
              // - 0x40000000: this number would convert to negative when Smi
              //   tagging.
              // These two cases can only happen with shifts by 0 or 1 when
              // handed a valid smi.
              __ tst(untagged_result, Operand(0xc0000000));
              deferred->JumpToAnswerOutOfRange(ne);
            }
            __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
          } else {
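            // A zero shift leaves the operand unchanged, but x >>> 0 on a
            // negative smi would produce an unsigned 32-bit value above
            // Smi::kMaxValue, so negative inputs still go to the deferred
            // code.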
            __ cmp(tos, Operand(0, RelocInfo::NONE));
            deferred->JumpToAnswerOutOfRange(mi);
          }
          break;
        }
        case Token::SAR: {
          if (shift_value != 0) {
            // Do the shift and the tag removal in one operation.  If the shift
            // is 31 bits (the highest possible value) then we emit the
            // instruction as a shift by 0 which in the ARM ISA means shift
            // arithmetically by 32.
            __ mov(tos, Operand(tos, ASR, (kSmiTagSize + shift_value) & 0x1f));
            __ mov(tos, Operand(tos, LSL, kSmiTagSize));
          }
          break;
        }
        default: UNREACHABLE();
      }
      deferred->BindExit();
      frame_->EmitPush(tos, result);
      break;
    }

    case Token::MOD: {
      ASSERT(!reversed);
      ASSERT(int_value >= 2);
      ASSERT(IsPowerOf2(int_value));
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned mask = (0x80000000u | kSmiTagMask);
      __ tst(tos, Operand(mask));
      deferred->Branch(ne);  // Go to deferred code on non-Smis and negative.
      mask = (int_value << kSmiTagSize) - 1;
      __ and_(tos, tos, Operand(mask));
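      // (E.g. for int_value == 8 with kSmiTagSize == 1 the second mask is
      // 0xf; ANDing a non-negative tagged smi with 0xf keeps the low three
      // bits of the untagged value, i.e. it leaves the tagged value of
      // lhs % 8.)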
      deferred->BindExit();
      // Mod by a positive power-of-2 smi gives a smi if the lhs is an
      // integer.
      frame_->EmitPush(
          tos,
          both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Number());
      break;
    }

    case Token::MUL: {
      ASSERT(IsEasyToMultiplyBy(int_value));
      DeferredCode* deferred =
          new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
      unsigned max_smi_that_wont_overflow = Smi::kMaxValue / int_value;
      max_smi_that_wont_overflow <<= kSmiTagSize;
      unsigned mask = 0x80000000u;
      while ((mask & max_smi_that_wont_overflow) == 0) {
        mask |= mask >> 1;
      }
      mask |= kSmiTagMask;
      // This single mask test conservatively rejects values that are too
      // large, as well as non-smis.  Unfortunately it also rejects negative
      // numbers, but since this code is inline we prefer brevity to
      // comprehensiveness.
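      // (For int_value == 10 the tagged limit is 0x0ccccccc, the loop grows
      // the mask to 0xf8000000, and the tag bit makes it 0xf8000001.
      // Anything that passes the tst below is a non-negative smi less than
      // 2^26, which multiplies by 10 without overflowing.)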
      __ tst(tos, Operand(mask));
      deferred->Branch(ne);
      InlineMultiplyByKnownInt(masm_, tos, tos, int_value);
      deferred->BindExit();
      frame_->EmitPush(tos);
      break;
    }

    default:
      UNREACHABLE();
      break;
  }
}


void CodeGenerator::Comparison(Condition cond,
                               Expression* left,
                               Expression* right,
                               bool strict) {
  VirtualFrame::RegisterAllocationScope scope(this);

  if (left != NULL) Load(left);
  if (right != NULL) Load(right);

  // sp[0] : y
  // sp[1] : x
  // result : cc register

  // Strict only makes sense for equality comparisons.
  ASSERT(!strict || cond == eq);

  Register lhs;
  Register rhs;

  bool lhs_is_smi;
  bool rhs_is_smi;

  // We load the top two stack positions into registers chosen by the virtual
  // frame.  This should keep the register shuffling to a minimum.
  // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
  if (cond == gt || cond == le) {
    cond = ReverseCondition(cond);
    lhs_is_smi = frame_->KnownSmiAt(0);
    rhs_is_smi = frame_->KnownSmiAt(1);
    lhs = frame_->PopToRegister();
    rhs = frame_->PopToRegister(lhs);  // Don't pop to the same register again!
  } else {
    rhs_is_smi = frame_->KnownSmiAt(0);
    lhs_is_smi = frame_->KnownSmiAt(1);
    rhs = frame_->PopToRegister();
    lhs = frame_->PopToRegister(rhs);  // Don't pop to the same register again!
  }

  bool both_sides_are_smi = (lhs_is_smi && rhs_is_smi);

  ASSERT(rhs.is(r0) || rhs.is(r1));
  ASSERT(lhs.is(r0) || lhs.is(r1));

  JumpTarget exit;

  if (!both_sides_are_smi) {
    // Now we have the two sides in r0 and r1.  We flush any other registers
    // because the stub doesn't know about register allocation.
    frame_->SpillAll();
    Register scratch = VirtualFrame::scratch0();
    Register smi_test_reg;
    if (lhs_is_smi) {
      smi_test_reg = rhs;
    } else if (rhs_is_smi) {
      smi_test_reg = lhs;
    } else {
      __ orr(scratch, lhs, Operand(rhs));
      smi_test_reg = scratch;
    }
    __ tst(smi_test_reg, Operand(kSmiTagMask));
    JumpTarget smi;
    smi.Branch(eq);

    // Perform non-smi comparison by stub.
    // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
    // We call with 0 args because there are 0 on the stack.
    CompareStub stub(cond, strict, NO_SMI_COMPARE_IN_STUB, lhs, rhs);
    frame_->CallStub(&stub, 0);
    __ cmp(r0, Operand(0, RelocInfo::NONE));
    exit.Jump();

    smi.Bind();
  }

  // Do smi comparisons by pointer comparison.
  __ cmp(lhs, Operand(rhs));
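  // (A raw word comparison is correct for two smis: tagging is a left
  // shift by kSmiTagSize with a zero tag, which preserves signed order.)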

  exit.Bind();
  cc_reg_ = cond;
}


// Call the function on the stack with the given arguments.
void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
                                      CallFunctionFlags flags,
                                      int position) {
  // Push the arguments ("left-to-right") on the stack.
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Record the position for debugging purposes.
  CodeForSourcePosition(position);

  // Use the shared code stub to call the function.
  InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
  CallFunctionStub call_function(arg_count, in_loop, flags);
  frame_->CallStub(&call_function, arg_count + 1);

  // Restore context and pop function from the stack.
  __ ldr(cp, frame_->Context());
  frame_->Drop();  // Discard the TOS.
}


void CodeGenerator::CallApplyLazy(Expression* applicand,
                                  Expression* receiver,
                                  VariableProxy* arguments,
                                  int position) {
  // An optimized implementation of expressions of the form
  // x.apply(y, arguments).
  // If the arguments object of the scope has not been allocated,
  // and x.apply is Function.prototype.apply, this optimization
  // just copies y and the arguments of the current function on the
  // stack, as receiver and arguments, and calls x.
  // In the implementation comments, we call x the applicand
  // and y the receiver.
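  // For example, a delegating function such as
  //   function forward() { return target.apply(this, arguments); }
  // takes this fast path.  ('forward' and 'target' are illustrative names,
  // not identifiers the codegen looks for.)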

  ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
  ASSERT(arguments->IsArguments());

  // Load applicand.apply onto the stack.  This will usually
  // give us a megamorphic load site.  Not super, but it works.
  Load(applicand);
  Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
  frame_->Dup();
  frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
  frame_->EmitPush(r0);

  // Load the receiver and the existing arguments object onto the
  // expression stack.  Avoid allocating the arguments object here.
  Load(receiver);
  LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);

  // At this point the top two stack elements are probably in registers
  // since they were just loaded.  Ensure they are in regs and get the
  // regs.
  Register receiver_reg = frame_->Peek2();
  Register arguments_reg = frame_->Peek();

  // From now on the frame is spilled.
  frame_->SpillAll();

  // Emit the source position information after having loaded the
  // receiver and the arguments.
  CodeForSourcePosition(position);
  // Contents of the stack at this point:
  // sp[0]: arguments object of the current function or the hole.
  // sp[1]: receiver
  // sp[2]: applicand.apply
  // sp[3]: applicand.

  // Check if the arguments object has been lazily allocated
  // already.  If so, just use that instead of copying the arguments
  // from the stack.  This also deals with cases where a local variable
  // named 'arguments' has been introduced.
  JumpTarget slow;
  Label done;
  __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
  __ cmp(ip, arguments_reg);
  slow.Branch(ne);

  Label build_args;
  // Get rid of the arguments object probe.
  frame_->Drop();
  // Stack now has 3 elements on it.
  // Contents of stack at this point:
  // sp[0]: receiver - in the receiver_reg register.
  // sp[1]: applicand.apply
  // sp[2]: applicand.

  // Check that the receiver really is a JavaScript object.
  __ JumpIfSmi(receiver_reg, &build_args);
  // We allow all JSObjects including JSFunctions.  As long as
  // JS_FUNCTION_TYPE is the last instance type and it is right
  // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
  // bound.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &build_args);

  // Check that applicand.apply is Function.prototype.apply.
  __ ldr(r0, MemOperand(sp, kPointerSize));
  __ JumpIfSmi(r0, &build_args);
  __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
  __ b(ne, &build_args);
  Handle<Code> apply_code(
      Isolate::Current()->builtins()->builtin(Builtins::kFunctionApply));
  __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
  __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ cmp(r1, Operand(apply_code));
  __ b(ne, &build_args);

  // Check that applicand is a function.
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ JumpIfSmi(r1, &build_args);
  __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
  __ b(ne, &build_args);

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  Label invoke, adapted;
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
  __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &adapted);

  // No arguments adaptor frame.  Copy fixed number of arguments.
  __ mov(r0, Operand(scope()->num_parameters()));
  for (int i = 0; i < scope()->num_parameters(); i++) {
    __ ldr(r2, frame_->ParameterAt(i));
    __ push(r2);
  }
  __ jmp(&invoke);

  // Arguments adaptor frame present.  Copy arguments from there, but
  // avoid copying too many arguments to avoid stack overflows.
  __ bind(&adapted);
  static const uint32_t kArgumentsLimit = 1 * KB;
  __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(r0, Operand(r0, LSR, kSmiTagSize));
  __ mov(r3, r0);
  __ cmp(r0, Operand(kArgumentsLimit));
  __ b(gt, &build_args);

  // Loop through the arguments pushing them onto the execution
  // stack.  We don't inform the virtual frame of the push, so we don't
  // have to worry about getting rid of the elements from the virtual
  // frame.
  Label loop;
  // r3 is a small non-negative integer, due to the test above.
  __ cmp(r3, Operand(0, RelocInfo::NONE));
  __ b(eq, &invoke);
  // Compute the address of the first argument.
  __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
  __ add(r2, r2, Operand(kPointerSize));
  __ bind(&loop);
  // Post-decrement argument address by kPointerSize on each iteration.
  __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex));
  __ push(r4);
  __ sub(r3, r3, Operand(1), SetCC);
  __ b(gt, &loop);

  // Invoke the function.
  __ bind(&invoke);
  ParameterCount actual(r0);
  __ InvokeFunction(r1, actual, CALL_FUNCTION);
  // Drop applicand.apply and applicand from the stack, and push
  // the result of the function call, but leave the spilled frame
  // unchanged, with 3 elements, so it is correct when we compile the
  // slow-case code.
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ push(r0);
  // Stack now has 1 element:
  // sp[0]: result
  __ jmp(&done);

  // Slow-case: Allocate the arguments object since we know it isn't
  // there, and fall-through to the slow-case where we call
  // applicand.apply.
  __ bind(&build_args);
  // Stack now has 3 elements, because we have jumped here from a point
  // where:
  // sp[0]: receiver
  // sp[1]: applicand.apply
  // sp[2]: applicand.
  StoreArgumentsObject(false);

  // Stack and frame now have 4 elements.
  slow.Bind();

  // Generic computation of x.apply(y, args) with no special optimization.
  // Flip applicand.apply and applicand on the stack, so
  // applicand looks like the receiver of the applicand.apply call.
  // Then process it as a normal function call.
  __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
  __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
  __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));

  CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
  frame_->CallStub(&call_function, 3);
  // The function and its two arguments have been dropped.
  frame_->Drop();  // Drop the receiver as well.
  frame_->EmitPush(r0);
  // The fast path also reaches 'done' with a spilled frame.
  frame_->SpillAll();
  // Stack now has 1 element:
  // sp[0]: result
  __ bind(&done);

  // Restore the context register after a call.
  __ ldr(cp, frame_->Context());
}


void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
  ASSERT(has_cc());
  Condition cond = if_true ? cc_reg_ : NegateCondition(cc_reg_);
  target->Branch(cond);
  cc_reg_ = al;
}


void CodeGenerator::CheckStack() {
  frame_->SpillAll();
  Comment cmnt(masm_, "[ check stack");
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  masm_->cmp(sp, Operand(ip));
  StackCheckStub stub;
  // Call the stub if lower.
  masm_->mov(ip,
             Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
                     RelocInfo::CODE_TARGET),
             LeaveCC,
             lo);
  masm_->Call(ip, lo);
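  // (Both the mov and the Call above are predicated on lo, so the stub is
  // only invoked when sp is below the stack limit; LeaveCC keeps the
  // comparison flags intact between the two instructions.)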
}


void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
    Visit(statements->at(i));
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitBlock(Block* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Block");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();
  VisitStatements(node->statements());
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  frame_->EmitPush(cp);
  frame_->EmitPush(Operand(pairs));
  frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));
  frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));

  frame_->CallRuntime(Runtime::kDeclareGlobals, 4);
  // The result is discarded.
}


void CodeGenerator::VisitDeclaration(Declaration* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = node->proxy()->var();
  ASSERT(var != NULL);  // must have been resolved
  Slot* slot = var->AsSlot();

  // If it was not possible to allocate the variable at compile time,
  // we need to "declare" it at runtime to make sure it actually
  // exists in the local context.
  if (slot != NULL && slot->type() == Slot::LOOKUP) {
    // Variables with a "LOOKUP" slot were introduced as non-locals
    // during variable resolution and must have mode DYNAMIC.
    ASSERT(var->is_dynamic());
    // For now, just do a runtime call.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(var->name()));
    // Declaration nodes are always declared in only two modes.
    ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
    PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
    frame_->EmitPush(Operand(Smi::FromInt(attr)));
    // Push initial value, if any.
    // Note: For variables we must not push an initial value (such as
    // 'undefined') because we may have a (legal) redeclaration and we
    // must not destroy the current value.
    if (node->mode() == Variable::CONST) {
      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
    } else if (node->fun() != NULL) {
      Load(node->fun());
    } else {
      frame_->EmitPush(Operand(0, RelocInfo::NONE));
    }

    frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
    // Ignore the return value (declarations are statements).

    ASSERT(frame_->height() == original_height);
    return;
  }

  ASSERT(!var->is_global());

  // If we have a function or a constant, we need to initialize the variable.
  Expression* val = NULL;
  if (node->mode() == Variable::CONST) {
    val = new Literal(FACTORY->the_hole_value());
  } else {
    val = node->fun();  // NULL if we don't have a function
  }

  if (val != NULL) {
    WriteBarrierCharacter wb_info =
        val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
    if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
    // Set initial value.
    Reference target(this, node->proxy());
    Load(val);
    target.SetValue(NOT_CONST_INIT, wb_info);

    // Get rid of the assigned value (declarations are statements).
    frame_->Drop();
  }
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ExpressionStatement");
  CodeForStatementPosition(node);
  Expression* expression = node->expression();
  expression->MarkAsStatement();
  Load(expression);
  frame_->Drop();
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "// EmptyStatement");
  CodeForStatementPosition(node);
  // nothing to do
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitIfStatement(IfStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ IfStatement");
  // Generate different code depending on which parts of the if statement
  // are present or not.
  bool has_then_stm = node->HasThenStatement();
  bool has_else_stm = node->HasElseStatement();

  CodeForStatementPosition(node);

  JumpTarget exit;
  if (has_then_stm && has_else_stm) {
    Comment cmnt(masm_, "[ IfThenElse");
    JumpTarget then;
    JumpTarget else_;
    // if (cond)
    LoadCondition(node->condition(), &then, &else_, true);
    if (frame_ != NULL) {
      Branch(false, &else_);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      Visit(node->then_statement());
    }
    if (frame_ != NULL) {
      exit.Jump();
    }
    // else
    if (else_.is_linked()) {
      else_.Bind();
      Visit(node->else_statement());
    }

  } else if (has_then_stm) {
    Comment cmnt(masm_, "[ IfThen");
    ASSERT(!has_else_stm);
    JumpTarget then;
    // if (cond)
    LoadCondition(node->condition(), &then, &exit, true);
    if (frame_ != NULL) {
      Branch(false, &exit);
    }
    // then
    if (frame_ != NULL || then.is_linked()) {
      then.Bind();
      Visit(node->then_statement());
    }

  } else if (has_else_stm) {
    Comment cmnt(masm_, "[ IfElse");
    ASSERT(!has_then_stm);
    JumpTarget else_;
    // if (!cond)
    LoadCondition(node->condition(), &exit, &else_, true);
    if (frame_ != NULL) {
      Branch(true, &exit);
    }
    // else
    if (frame_ != NULL || else_.is_linked()) {
      else_.Bind();
      Visit(node->else_statement());
    }

  } else {
    Comment cmnt(masm_, "[ If");
    ASSERT(!has_then_stm && !has_else_stm);
    // if (cond)
    LoadCondition(node->condition(), &exit, &exit, false);
    if (frame_ != NULL) {
      if (has_cc()) {
        cc_reg_ = al;
      } else {
        frame_->Drop();
      }
    }
  }

  // end
  if (exit.is_linked()) {
    exit.Bind();
  }
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
  Comment cmnt(masm_, "[ ContinueStatement");
  CodeForStatementPosition(node);
  node->target()->continue_target()->Jump();
}


void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
  Comment cmnt(masm_, "[ BreakStatement");
  CodeForStatementPosition(node);
  node->target()->break_target()->Jump();
}


void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
  Comment cmnt(masm_, "[ ReturnStatement");

  CodeForStatementPosition(node);
  Load(node->expression());
  frame_->PopToR0();
  frame_->PrepareForReturn();
  if (function_return_is_shadowed_) {
    function_return_.Jump();
  } else {
    // Pop the result from the frame and prepare the frame for
    // returning, thus making it easier to merge.
    if (function_return_.is_bound()) {
      // If the function return label is already bound we reuse the
      // code by jumping to the return site.
      function_return_.Jump();
    } else {
      function_return_.Bind();
      GenerateReturnSequence();
    }
  }
}


void CodeGenerator::GenerateReturnSequence() {
  if (FLAG_trace) {
    // Push the return value on the stack as the parameter.
    // Runtime::TraceExit returns the parameter as it is.
    frame_->EmitPush(r0);
    frame_->CallRuntime(Runtime::kTraceExit, 1);
  }

#ifdef DEBUG
  // Add a label for checking the size of the code used for returning.
  Label check_exit_codesize;
  masm_->bind(&check_exit_codesize);
#endif
  // Make sure that the constant pool is not emitted inside of the return
  // sequence.
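  // (The return sequence must be a fixed number of instructions -- see the
  // kJSReturnSequenceInstructions check below -- because the debugger
  // patches it in place; interleaved constant pool data would change its
  // size.)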
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Tear down the frame which will restore the caller's frame pointer and
    // the link register.
    frame_->Exit();

    // Here we use masm_-> instead of the __ macro to prevent the code
    // coverage tool from instrumenting these instructions, as we rely on
    // their exact size here.
    int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
    masm_->add(sp, sp, Operand(sp_delta));
    masm_->Jump(lr);
    DeleteFrame();

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}


void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WithEnterStatement");
  CodeForStatementPosition(node);
  Load(node->expression());
  if (node->is_catch_block()) {
    frame_->CallRuntime(Runtime::kPushCatchContext, 1);
  } else {
    frame_->CallRuntime(Runtime::kPushContext, 1);
  }
#ifdef DEBUG
  JumpTarget verified_true;
  __ cmp(r0, cp);
  verified_true.Branch(eq);
  __ stop("PushContext: r0 is expected to be the same as cp");
  verified_true.Bind();
#endif
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WithExitStatement");
  CodeForStatementPosition(node);
  // Pop context.
  __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
  // Update context local.
  __ str(cp, frame_->Context());
  ASSERT(frame_->height() == original_height);
}


void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ SwitchStatement");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();

  Load(node->tag());

  JumpTarget next_test;
  JumpTarget fall_through;
  JumpTarget default_entry;
  JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
  ZoneList<CaseClause*>* cases = node->cases();
  int length = cases->length();
  CaseClause* default_clause = NULL;
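  // (A default clause may appear between case clauses, e.g.
  //   switch (x) { case 0: a(); default: b(); case 1: c(); }
  // It is compiled last, so it needs its own entry target, reached by
  // falling through from the clause before it, and its own exit target,
  // used to fall through into the clause after it.)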

  for (int i = 0; i < length; i++) {
    CaseClause* clause = cases->at(i);
    if (clause->is_default()) {
      // Remember the default clause and compile it at the end.
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case clause");
    // Compile the test.
    next_test.Bind();
    next_test.Unuse();
    // Duplicate TOS.
    frame_->Dup();
    Comparison(eq, NULL, clause->label(), true);
    Branch(false, &next_test);

    // Before entering the body from the test, remove the switch value from
    // the stack.
    frame_->Drop();

    // Label the body so that fall through is enabled.
    if (i > 0 && cases->at(i - 1)->is_default()) {
      default_exit.Bind();
    } else {
      fall_through.Bind();
      fall_through.Unuse();
    }
    VisitStatements(clause->statements());

    // If control flow can fall through from the body, jump to the next body
    // or the end of the statement.
    if (frame_ != NULL) {
      if (i < length - 1 && cases->at(i + 1)->is_default()) {
        default_entry.Jump();
      } else {
        fall_through.Jump();
      }
    }
  }

  // The final "test" removes the switch value.
  next_test.Bind();
  frame_->Drop();

  // If there is a default clause, compile it.
  if (default_clause != NULL) {
    Comment cmnt(masm_, "[ Default clause");
    default_entry.Bind();
    VisitStatements(default_clause->statements());
    // If control flow can fall out of the default and there is a case after
    // it, jump to that case's body.
    if (frame_ != NULL && default_exit.is_bound()) {
      default_exit.Jump();
    }
  }

  if (fall_through.is_linked()) {
    fall_through.Bind();
  }

  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  node->break_target()->Unuse();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ DoWhileStatement");
  CodeForStatementPosition(node);
  node->break_target()->SetExpectedHeight();
  JumpTarget body(JumpTarget::BIDIRECTIONAL);
  IncrementLoopNesting();

  // Label the top of the loop for the backward CFG edge.  If the test
  // is always true we can use the continue target, and if the test is
  // always false there is no need.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  switch (info) {
    case ALWAYS_TRUE:
      node->continue_target()->SetExpectedHeight();
      node->continue_target()->Bind();
      break;
    case ALWAYS_FALSE:
      node->continue_target()->SetExpectedHeight();
      break;
    case DONT_KNOW:
      node->continue_target()->SetExpectedHeight();
      body.Bind();
      break;
  }
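  // (E.g. 'do { ... } while (true)' reuses the continue target as the loop
  // top, 'do { ... } while (false)' needs no back edge at all, and an
  // unknown condition branches back to the separate 'body' target bound
  // above.)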

  CheckStack();  // TODO(1222600): ignore if body contains calls.
  Visit(node->body());

  // Compile the test.
  switch (info) {
    case ALWAYS_TRUE:
      // If control can fall off the end of the body, jump back to the
      // top.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
      break;
    case ALWAYS_FALSE:
      // If we have a continue in the body, we only have to bind its
      // jump target.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      break;
    case DONT_KNOW:
      // We have to compile the test expression if it can be reached by
      // control flow falling out of the body or via continue.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        Comment cmnt(masm_, "[ DoWhileCondition");
        CodeForDoWhileConditionPosition(node);
        LoadCondition(node->cond(), &body, node->break_target(), true);
        if (has_valid_frame()) {
          // An invalid frame here indicates that control did not
          // fall out of the test expression.
          Branch(true, &body);
        }
      }
      break;
  }

  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ WhileStatement");
  CodeForStatementPosition(node);

  // If the test is never true and has no side effects there is no need
  // to compile the test or body.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  node->break_target()->SetExpectedHeight();
  IncrementLoopNesting();

  // Label the top of the loop with the continue target for the backward
  // CFG edge.
  node->continue_target()->SetExpectedHeight();
  node->continue_target()->Bind();

  if (info == DONT_KNOW) {
    JumpTarget body(JumpTarget::BIDIRECTIONAL);
    LoadCondition(node->cond(), &body, node->break_target(), true);
    if (has_valid_frame()) {
      // A NULL frame indicates that control did not fall out of the
      // test expression.
      Branch(false, node->break_target());
    }
    if (has_valid_frame() || body.is_linked()) {
      body.Bind();
    }
  }

  if (has_valid_frame()) {
    CheckStack();  // TODO(1222600): ignore if body contains calls.
    Visit(node->body());

    // If control flow can fall out of the body, jump back to the top.
    if (has_valid_frame()) {
      node->continue_target()->Jump();
    }
  }
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}


void CodeGenerator::VisitForStatement(ForStatement* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ ForStatement");
  CodeForStatementPosition(node);
  if (node->init() != NULL) {
    Visit(node->init());
  }

  // If the test is never true there is no need to compile the test or
  // body.
  ConditionAnalysis info = AnalyzeCondition(node->cond());
  if (info == ALWAYS_FALSE) return;

  node->break_target()->SetExpectedHeight();
  IncrementLoopNesting();

  // We know that the loop index is a smi if it is not modified in the
  // loop body and it is checked against a constant limit in the loop
  // condition.  In this case, we reset the static type information of the
  // loop index to smi before compiling the body, the update expression, and
  // the bottom check of the loop condition.
  TypeInfoCodeGenState type_info_scope(this,
                                       node->is_fast_smi_loop() ?
                                       node->loop_variable()->AsSlot() :
                                       NULL,
                                       TypeInfo::Smi());
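  // (E.g. in 'for (var i = 0; i < 100; i++) { ... }' the index i is known
  // to be a smi throughout, so smi checks on it can typically be elided
  // while compiling the body and the update expression.)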

  // If there is no update statement, label the top of the loop with the
  // continue target, otherwise with the loop target.
  JumpTarget loop(JumpTarget::BIDIRECTIONAL);
  if (node->next() == NULL) {
    node->continue_target()->SetExpectedHeight();
    node->continue_target()->Bind();
  } else {
    node->continue_target()->SetExpectedHeight();
    loop.Bind();
  }

  // If the test is always true, there is no need to compile it.
  if (info == DONT_KNOW) {
    JumpTarget body;
    LoadCondition(node->cond(), &body, node->break_target(), true);
    if (has_valid_frame()) {
      Branch(false, node->break_target());
    }
    if (has_valid_frame() || body.is_linked()) {
      body.Bind();
    }
  }

  if (has_valid_frame()) {
    CheckStack();  // TODO(1222600): ignore if body contains calls.
    Visit(node->body());

    if (node->next() == NULL) {
      // If there is no update statement and control flow can fall out
      // of the loop, jump directly to the continue label.
      if (has_valid_frame()) {
        node->continue_target()->Jump();
      }
    } else {
      // If there is an update statement and control flow can reach it
      // via falling out of the body of the loop or continuing, we
      // compile the update statement.
      if (node->continue_target()->is_linked()) {
        node->continue_target()->Bind();
      }
      if (has_valid_frame()) {
        // Record the source position of the statement: although this code
        // comes after the code for the body, it belongs to the loop
        // statement and not to the body.
        CodeForStatementPosition(node);
        Visit(node->next());
        loop.Jump();
      }
    }
  }
  if (node->break_target()->is_linked()) {
    node->break_target()->Bind();
  }
  DecrementLoopNesting();
  ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
2550
2551
2552void CodeGenerator::VisitForInStatement(ForInStatement* node) {
2553#ifdef DEBUG
2554 int original_height = frame_->height();
2555#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002556 Comment cmnt(masm_, "[ ForInStatement");
2557 CodeForStatementPosition(node);
2558
2559 JumpTarget primitive;
2560 JumpTarget jsobject;
2561 JumpTarget fixed_array;
2562 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
2563 JumpTarget end_del_check;
2564 JumpTarget exit;
2565
2566 // Get the object to enumerate over (converted to JSObject).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002567 Load(node->enumerable());
Steve Blocka7e24c12009-10-30 11:49:00 +00002568
Iain Merrick75681382010-08-19 15:07:18 +01002569 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002570  // Both SpiderMonkey and kjs ignore null and undefined, in contrast
2571  // to the specification: section 12.6.4 mandates a call to ToObject.
2572 frame_->EmitPop(r0);
2573 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2574 __ cmp(r0, ip);
2575 exit.Branch(eq);
2576 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2577 __ cmp(r0, ip);
2578 exit.Branch(eq);
2579
2580 // Stack layout in body:
2581 // [iteration counter (Smi)]
2582 // [length of array]
2583 // [FixedArray]
2584 // [Map or 0]
2585 // [Object]
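  // Editor's sketch (assumption, not in the original source): this visitor
  // compiles loops such as
  //   for (var key in obj) { f(obj[key]); }
  // The null/undefined checks above implement the SpiderMonkey/kjs
  // behavior, so `for (var key in null) {}` runs zero iterations instead
  // of failing in ToObject.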
2586
2587 // Check if enumerable is already a JSObject
2588 __ tst(r0, Operand(kSmiTagMask));
2589 primitive.Branch(eq);
2590 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2591 jsobject.Branch(hs);
2592
2593 primitive.Bind();
2594 frame_->EmitPush(r0);
Steve Blockd0582a62009-12-15 09:54:21 +00002595 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002596
2597 jsobject.Bind();
2598 // Get the set of properties (as a FixedArray or Map).
Steve Blockd0582a62009-12-15 09:54:21 +00002599 // r0: value to be iterated over
2600 frame_->EmitPush(r0); // Push the object being iterated over.
2601
2602 // Check cache validity in generated code. This is a fast case for
2603 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
2604 // guarantee cache validity, call the runtime system to check cache
2605 // validity or get the property names in a fixed array.
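  // Editor's sketch of the fast-path check emitted below, as assumed
  // pseudocode (not part of the original source):
  //   for (o = enumerable; ; o = o.map.prototype) {
  //     if (o.elements != empty_fixed_array) goto call_runtime;
  //     if (o.map.instance_descriptors == empty_descriptor_array)
  //       goto call_runtime;
  //     if (o.map.instance_descriptors has no enum cache) goto call_runtime;
  //     if (o != enumerable && o's enum cache bridge cache != empty)
  //       goto call_runtime;
  //     if (o.map.prototype == null) break;  // cache is valid
  //   }
  //   use_cache(enumerable.map);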
2606 JumpTarget call_runtime;
2607 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2608 JumpTarget check_prototype;
2609 JumpTarget use_cache;
2610 __ mov(r1, Operand(r0));
2611 loop.Bind();
2612 // Check that there are no elements.
2613 __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
2614 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2615 __ cmp(r2, r4);
2616 call_runtime.Branch(ne);
2617 // Check that instance descriptors are not empty so that we can
2618 // check for an enum cache. Leave the map in r3 for the subsequent
2619 // prototype load.
2620 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
2621 __ ldr(r2, FieldMemOperand(r3, Map::kInstanceDescriptorsOffset));
2622 __ LoadRoot(ip, Heap::kEmptyDescriptorArrayRootIndex);
2623 __ cmp(r2, ip);
2624 call_runtime.Branch(eq);
2625  // Check that there is an enum cache in the non-empty instance
2626 // descriptors. This is the case if the next enumeration index
2627 // field does not contain a smi.
2628 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumerationIndexOffset));
2629 __ tst(r2, Operand(kSmiTagMask));
2630 call_runtime.Branch(eq);
2631 // For all objects but the receiver, check that the cache is empty.
2632 // r4: empty fixed array root.
2633 __ cmp(r1, r0);
2634 check_prototype.Branch(eq);
2635 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
2636 __ cmp(r2, r4);
2637 call_runtime.Branch(ne);
2638 check_prototype.Bind();
2639 // Load the prototype from the map and loop if non-null.
2640 __ ldr(r1, FieldMemOperand(r3, Map::kPrototypeOffset));
2641 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2642 __ cmp(r1, ip);
2643 loop.Branch(ne);
2644 // The enum cache is valid. Load the map of the object being
2645 // iterated over and use the cache for the iteration.
2646 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2647 use_cache.Jump();
2648
2649 call_runtime.Bind();
2650 // Call the runtime to get the property names for the object.
2651 frame_->EmitPush(r0); // push the object (slot 4) for the runtime call
Steve Blocka7e24c12009-10-30 11:49:00 +00002652 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
2653
Steve Blockd0582a62009-12-15 09:54:21 +00002654 // If we got a map from the runtime call, we can do a fast
2655 // modification check. Otherwise, we got a fixed array, and we have
2656 // to do a slow check.
2657 // r0: map or fixed array (result from call to
2658 // Runtime::kGetPropertyNamesFast)
Steve Blocka7e24c12009-10-30 11:49:00 +00002659 __ mov(r2, Operand(r0));
2660 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
2661 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
2662 __ cmp(r1, ip);
2663 fixed_array.Branch(ne);
2664
Steve Blockd0582a62009-12-15 09:54:21 +00002665 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002666 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00002667 // r0: map (either the result from a call to
2668 // Runtime::kGetPropertyNamesFast or has been fetched directly from
2669 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00002670 __ mov(r1, Operand(r0));
2671 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
2672 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
2673 __ ldr(r2,
2674 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
2675
2676 frame_->EmitPush(r0); // map
2677 frame_->EmitPush(r2); // enum cache bridge cache
2678 __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002679 frame_->EmitPush(r0);
2680 __ mov(r0, Operand(Smi::FromInt(0)));
2681 frame_->EmitPush(r0);
2682 entry.Jump();
2683
2684 fixed_array.Bind();
2685 __ mov(r1, Operand(Smi::FromInt(0)));
2686 frame_->EmitPush(r1); // insert 0 in place of Map
2687 frame_->EmitPush(r0);
2688
2689 // Push the length of the array and the initial index onto the stack.
2690 __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002691 frame_->EmitPush(r0);
2692 __ mov(r0, Operand(Smi::FromInt(0))); // init index
2693 frame_->EmitPush(r0);
2694
2695 // Condition.
2696 entry.Bind();
2697 // sp[0] : index
2698 // sp[1] : array/enum cache length
2699 // sp[2] : array or enum cache
2700 // sp[3] : 0 or map
2701 // sp[4] : enumerable
2702 // Grab the current frame's height for the break and continue
2703 // targets only after all the state is pushed on the frame.
Kristian Monsen25f61362010-05-21 11:50:48 +01002704 node->break_target()->SetExpectedHeight();
2705 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002706
Kristian Monsen25f61362010-05-21 11:50:48 +01002707 // Load the current count to r0, load the length to r1.
Leon Clarkef7060e22010-06-03 12:02:55 +01002708 __ Ldrd(r0, r1, frame_->ElementAt(0));
Steve Block6ded16b2010-05-10 14:33:55 +01002709 __ cmp(r0, r1); // compare to the array length
Steve Blocka7e24c12009-10-30 11:49:00 +00002710 node->break_target()->Branch(hs);
2711
Steve Blocka7e24c12009-10-30 11:49:00 +00002712 // Get the i'th entry of the array.
2713 __ ldr(r2, frame_->ElementAt(2));
2714 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2715 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
2716
2717 // Get Map or 0.
2718 __ ldr(r2, frame_->ElementAt(3));
2719 // Check if this (still) matches the map of the enumerable.
2720 // If not, we have to filter the key.
2721 __ ldr(r1, frame_->ElementAt(4));
2722 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
2723 __ cmp(r1, Operand(r2));
2724 end_del_check.Branch(eq);
2725
2726 // Convert the entry to a string (or null if it isn't a property anymore).
2727 __ ldr(r0, frame_->ElementAt(4)); // push enumerable
2728 frame_->EmitPush(r0);
2729 frame_->EmitPush(r3); // push entry
Steve Blockd0582a62009-12-15 09:54:21 +00002730 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
Iain Merrick75681382010-08-19 15:07:18 +01002731 __ mov(r3, Operand(r0), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00002732 // If the property has been removed while iterating, we just skip it.
Steve Blocka7e24c12009-10-30 11:49:00 +00002733 node->continue_target()->Branch(eq);
2734
2735 end_del_check.Bind();
2736 // Store the entry in the 'each' expression and take another spin in the
2737  // loop. r3: i'th entry of the enum cache (or string thereof)
2738 frame_->EmitPush(r3); // push entry
Iain Merrick75681382010-08-19 15:07:18 +01002739 { VirtualFrame::RegisterAllocationScope scope(this);
2740 Reference each(this, node->each());
Steve Blocka7e24c12009-10-30 11:49:00 +00002741 if (!each.is_illegal()) {
2742 if (each.size() > 0) {
Iain Merrick75681382010-08-19 15:07:18 +01002743 // Loading a reference may leave the frame in an unspilled state.
2744 frame_->SpillAll(); // Sync stack to memory.
2745 // Get the value (under the reference on the stack) from memory.
Steve Blocka7e24c12009-10-30 11:49:00 +00002746 __ ldr(r0, frame_->ElementAt(each.size()));
2747 frame_->EmitPush(r0);
Steve Block8defd9f2010-07-08 12:39:36 +01002748 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Iain Merrick75681382010-08-19 15:07:18 +01002749 frame_->Drop(2); // The result of the set and the extra pushed value.
Leon Clarked91b9f72010-01-27 17:25:45 +00002750 } else {
2751 // If the reference was to a slot we rely on the convenient property
Iain Merrick75681382010-08-19 15:07:18 +01002752      // that it doesn't matter whether a value (e.g., r3 pushed above) is
Leon Clarked91b9f72010-01-27 17:25:45 +00002753 // right on top of or right underneath a zero-sized reference.
Steve Block8defd9f2010-07-08 12:39:36 +01002754 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Iain Merrick75681382010-08-19 15:07:18 +01002755 frame_->Drop(1); // Drop the result of the set operation.
Steve Blocka7e24c12009-10-30 11:49:00 +00002756 }
2757 }
2758 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002759 // Body.
2760 CheckStack(); // TODO(1222600): ignore if body contains calls.
Iain Merrick75681382010-08-19 15:07:18 +01002761 { VirtualFrame::RegisterAllocationScope scope(this);
2762 Visit(node->body());
2763 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002764
2765 // Next. Reestablish a spilled frame in case we are coming here via
2766 // a continue in the body.
2767 node->continue_target()->Bind();
2768 frame_->SpillAll();
2769 frame_->EmitPop(r0);
2770 __ add(r0, r0, Operand(Smi::FromInt(1)));
2771 frame_->EmitPush(r0);
2772 entry.Jump();
2773
2774 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
2775 // any frame.
2776 node->break_target()->Bind();
2777 frame_->Drop(5);
2778
2779 // Exit.
2780 exit.Bind();
2781 node->continue_target()->Unuse();
2782 node->break_target()->Unuse();
2783 ASSERT(frame_->height() == original_height);
2784}
2785
2786
Steve Block3ce2e202009-11-05 08:53:23 +00002787void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002788#ifdef DEBUG
2789 int original_height = frame_->height();
2790#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002791 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002792 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002793 CodeForStatementPosition(node);
2794
2795 JumpTarget try_block;
2796 JumpTarget exit;
2797
2798 try_block.Call();
2799 // --- Catch block ---
2800 frame_->EmitPush(r0);
2801
2802 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00002803 Variable* catch_var = node->catch_var()->var();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002804 ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
2805 StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00002806
2807 // Remove the exception from the stack.
2808 frame_->Drop();
2809
Iain Merrick75681382010-08-19 15:07:18 +01002810 { VirtualFrame::RegisterAllocationScope scope(this);
2811 VisitStatements(node->catch_block()->statements());
2812 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002813 if (frame_ != NULL) {
2814 exit.Jump();
2815 }
2816
2818 // --- Try block ---
2819 try_block.Bind();
2820
2821 frame_->PushTryHandler(TRY_CATCH_HANDLER);
2822 int handler_height = frame_->height();
2823
2824 // Shadow the labels for all escapes from the try block, including
2825 // returns. During shadowing, the original label is hidden as the
2826 // LabelShadow and operations on the original actually affect the
2827 // shadowing label.
2828 //
2829 // We should probably try to unify the escaping labels and the return
2830 // label.
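  // Editor's illustration (not in the original source): in
  //   function f() { try { return g(); } catch (e) { return 0; } }
  // the `return` inside the try block escapes the handler and must unlink
  // it first, which is why the function return target is shadowed here.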
2831 int nof_escapes = node->escaping_targets()->length();
2832 List<ShadowTarget*> shadows(1 + nof_escapes);
2833
2834 // Add the shadow target for the function return.
2835 static const int kReturnShadowIndex = 0;
2836 shadows.Add(new ShadowTarget(&function_return_));
2837 bool function_return_was_shadowed = function_return_is_shadowed_;
2838 function_return_is_shadowed_ = true;
2839 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2840
2841 // Add the remaining shadow targets.
2842 for (int i = 0; i < nof_escapes; i++) {
2843 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2844 }
2845
2846 // Generate code for the statements in the try block.
Iain Merrick75681382010-08-19 15:07:18 +01002847 { VirtualFrame::RegisterAllocationScope scope(this);
2848 VisitStatements(node->try_block()->statements());
2849 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002850
2851 // Stop the introduced shadowing and count the number of required unlinks.
2852 // After shadowing stops, the original labels are unshadowed and the
2853 // LabelShadows represent the formerly shadowing labels.
2854 bool has_unlinks = false;
2855 for (int i = 0; i < shadows.length(); i++) {
2856 shadows[i]->StopShadowing();
2857 has_unlinks = has_unlinks || shadows[i]->is_linked();
2858 }
2859 function_return_is_shadowed_ = function_return_was_shadowed;
2860
2861 // Get an external reference to the handler address.
Steve Block44f0eee2011-05-26 01:26:41 +01002862 ExternalReference handler_address(Isolate::k_handler_address, isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002863
2864 // If we can fall off the end of the try block, unlink from try chain.
2865 if (has_valid_frame()) {
2866 // The next handler address is on top of the frame. Unlink from
2867 // the handler list and drop the rest of this handler from the
2868 // frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002869 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Iain Merrick75681382010-08-19 15:07:18 +01002870 frame_->EmitPop(r1); // r0 can contain the return value.
Steve Blocka7e24c12009-10-30 11:49:00 +00002871 __ mov(r3, Operand(handler_address));
2872 __ str(r1, MemOperand(r3));
2873 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2874 if (has_unlinks) {
2875 exit.Jump();
2876 }
2877 }
2878
2879 // Generate unlink code for the (formerly) shadowing labels that have been
2880 // jumped to. Deallocate each shadow target.
2881 for (int i = 0; i < shadows.length(); i++) {
2882 if (shadows[i]->is_linked()) {
2883      // Unlink from the try chain.
2884 shadows[i]->Bind();
2885 // Because we can be jumping here (to spilled code) from unspilled
2886 // code, we need to reestablish a spilled frame at this block.
2887 frame_->SpillAll();
2888
2889 // Reload sp from the top handler, because some statements that we
2890      // break from (e.g., for...in) may have left stuff on the stack.
2891 __ mov(r3, Operand(handler_address));
2892 __ ldr(sp, MemOperand(r3));
2893 frame_->Forget(frame_->height() - handler_height);
2894
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002895 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Iain Merrick75681382010-08-19 15:07:18 +01002896 frame_->EmitPop(r1); // r0 can contain the return value.
Steve Blocka7e24c12009-10-30 11:49:00 +00002897 __ str(r1, MemOperand(r3));
2898 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2899
2900 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
2901 frame_->PrepareForReturn();
2902 }
2903 shadows[i]->other_target()->Jump();
2904 }
2905 }
2906
2907 exit.Bind();
2908 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2909}
2910
2911
Steve Block3ce2e202009-11-05 08:53:23 +00002912void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002913#ifdef DEBUG
2914 int original_height = frame_->height();
2915#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002916 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002917 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002918 CodeForStatementPosition(node);
2919
2920 // State: Used to keep track of reason for entering the finally
2921 // block. Should probably be extended to hold information for
2922 // break/continue from within the try block.
2923 enum { FALLING, THROWING, JUMPING };
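  // Editor's illustration (assumption, not in the original source): for
  //   try { if (done) return r; } finally { release(); }
  // the finally block is entered with state FALLING when the try block
  // completes normally, THROWING when an exception was thrown, and
  // JUMPING (offset by the shadow index) for the shadowed return; the
  // dispatch code at the end of this function uses the state in r2.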
2924
2925 JumpTarget try_block;
2926 JumpTarget finally_block;
2927
2928 try_block.Call();
2929
2930 frame_->EmitPush(r0); // save exception object on the stack
2931 // In case of thrown exceptions, this is where we continue.
2932 __ mov(r2, Operand(Smi::FromInt(THROWING)));
2933 finally_block.Jump();
2934
2935 // --- Try block ---
2936 try_block.Bind();
2937
2938 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
2939 int handler_height = frame_->height();
2940
2941 // Shadow the labels for all escapes from the try block, including
2942 // returns. Shadowing hides the original label as the LabelShadow and
2943 // operations on the original actually affect the shadowing label.
2944 //
2945 // We should probably try to unify the escaping labels and the return
2946 // label.
2947 int nof_escapes = node->escaping_targets()->length();
2948 List<ShadowTarget*> shadows(1 + nof_escapes);
2949
2950 // Add the shadow target for the function return.
2951 static const int kReturnShadowIndex = 0;
2952 shadows.Add(new ShadowTarget(&function_return_));
2953 bool function_return_was_shadowed = function_return_is_shadowed_;
2954 function_return_is_shadowed_ = true;
2955 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2956
2957 // Add the remaining shadow targets.
2958 for (int i = 0; i < nof_escapes; i++) {
2959 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2960 }
2961
2962 // Generate code for the statements in the try block.
Iain Merrick75681382010-08-19 15:07:18 +01002963 { VirtualFrame::RegisterAllocationScope scope(this);
2964 VisitStatements(node->try_block()->statements());
2965 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002966
2967 // Stop the introduced shadowing and count the number of required unlinks.
2968 // After shadowing stops, the original labels are unshadowed and the
2969 // LabelShadows represent the formerly shadowing labels.
2970 int nof_unlinks = 0;
2971 for (int i = 0; i < shadows.length(); i++) {
2972 shadows[i]->StopShadowing();
2973 if (shadows[i]->is_linked()) nof_unlinks++;
2974 }
2975 function_return_is_shadowed_ = function_return_was_shadowed;
2976
2977 // Get an external reference to the handler address.
Steve Block44f0eee2011-05-26 01:26:41 +01002978 ExternalReference handler_address(Isolate::k_handler_address, isolate());
Steve Blocka7e24c12009-10-30 11:49:00 +00002979
2980 // If we can fall off the end of the try block, unlink from the try
2981 // chain and set the state on the frame to FALLING.
2982 if (has_valid_frame()) {
2983 // The next handler address is on top of the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002984 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002985 frame_->EmitPop(r1);
2986 __ mov(r3, Operand(handler_address));
2987 __ str(r1, MemOperand(r3));
2988 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2989
2990 // Fake a top of stack value (unneeded when FALLING) and set the
2991 // state in r2, then jump around the unlink blocks if any.
2992 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2993 frame_->EmitPush(r0);
2994 __ mov(r2, Operand(Smi::FromInt(FALLING)));
2995 if (nof_unlinks > 0) {
2996 finally_block.Jump();
2997 }
2998 }
2999
3000 // Generate code to unlink and set the state for the (formerly)
3001 // shadowing targets that have been jumped to.
3002 for (int i = 0; i < shadows.length(); i++) {
3003 if (shadows[i]->is_linked()) {
3004 // If we have come from the shadowed return, the return value is
3005 // in (a non-refcounted reference to) r0. We must preserve it
3006 // until it is pushed.
3007 //
3008 // Because we can be jumping here (to spilled code) from
3009 // unspilled code, we need to reestablish a spilled frame at
3010 // this block.
3011 shadows[i]->Bind();
3012 frame_->SpillAll();
3013
3014 // Reload sp from the top handler, because some statements that
3015      // we break from (e.g., for...in) may have left stuff on the
3016 // stack.
3017 __ mov(r3, Operand(handler_address));
3018 __ ldr(sp, MemOperand(r3));
3019 frame_->Forget(frame_->height() - handler_height);
3020
3021 // Unlink this handler and drop it from the frame. The next
3022 // handler address is currently on top of the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003023 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003024 frame_->EmitPop(r1);
3025 __ str(r1, MemOperand(r3));
3026 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
3027
3028 if (i == kReturnShadowIndex) {
3029 // If this label shadowed the function return, materialize the
3030 // return value on the stack.
3031 frame_->EmitPush(r0);
3032 } else {
3033 // Fake TOS for targets that shadowed breaks and continues.
3034 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3035 frame_->EmitPush(r0);
3036 }
3037 __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
3038 if (--nof_unlinks > 0) {
3039 // If this is not the last unlink block, jump around the next.
3040 finally_block.Jump();
3041 }
3042 }
3043 }
3044
3045 // --- Finally block ---
3046 finally_block.Bind();
3047
3048 // Push the state on the stack.
3049 frame_->EmitPush(r2);
3050
3051 // We keep two elements on the stack - the (possibly faked) result
3052 // and the state - while evaluating the finally block.
3053 //
3054 // Generate code for the statements in the finally block.
Iain Merrick75681382010-08-19 15:07:18 +01003055 { VirtualFrame::RegisterAllocationScope scope(this);
3056 VisitStatements(node->finally_block()->statements());
3057 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003058
3059 if (has_valid_frame()) {
3060 // Restore state and return value or faked TOS.
3061 frame_->EmitPop(r2);
3062 frame_->EmitPop(r0);
3063 }
3064
3065 // Generate code to jump to the right destination for all used
3066 // formerly shadowing targets. Deallocate each shadow target.
3067 for (int i = 0; i < shadows.length(); i++) {
3068 if (has_valid_frame() && shadows[i]->is_bound()) {
3069 JumpTarget* original = shadows[i]->other_target();
3070 __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
3071 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
3072 JumpTarget skip;
3073 skip.Branch(ne);
3074 frame_->PrepareForReturn();
3075 original->Jump();
3076 skip.Bind();
3077 } else {
3078 original->Branch(eq);
3079 }
3080 }
3081 }
3082
3083 if (has_valid_frame()) {
3084 // Check if we need to rethrow the exception.
3085 JumpTarget exit;
3086 __ cmp(r2, Operand(Smi::FromInt(THROWING)));
3087 exit.Branch(ne);
3088
3089 // Rethrow exception.
3090 frame_->EmitPush(r0);
3091 frame_->CallRuntime(Runtime::kReThrow, 1);
3092
3093 // Done.
3094 exit.Bind();
3095 }
3096 ASSERT(!has_valid_frame() || frame_->height() == original_height);
3097}
3098
3099
3100void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
3101#ifdef DEBUG
3102 int original_height = frame_->height();
3103#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003104  Comment cmnt(masm_, "[ DebuggerStatement");
3105 CodeForStatementPosition(node);
3106#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00003107 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00003108#endif
3109 // Ignore the return value.
3110 ASSERT(frame_->height() == original_height);
3111}
3112
3113
Steve Block6ded16b2010-05-10 14:33:55 +01003114void CodeGenerator::InstantiateFunction(
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003115 Handle<SharedFunctionInfo> function_info,
3116 bool pretenure) {
Leon Clarkee46be812010-01-19 14:06:41 +00003117 // Use the fast case closure allocation code that allocates in new
3118 // space for nested functions that don't need literals cloning.
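  // Editor's example (not in the original source): a nested function with
  // no literals, e.g.
  //   function outer() { return function (a, b) { return a + b; }; }
  // can be allocated by FastNewClosureStub below; functions with literals
  // (num_literals() > 0) or pretenured closures use Runtime::kNewClosure.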
Steve Block44f0eee2011-05-26 01:26:41 +01003119 if (!pretenure &&
3120 scope()->is_function_scope() &&
3121 function_info->num_literals() == 0) {
3122 FastNewClosureStub stub(
3123 function_info->strict_mode() ? kStrictMode : kNonStrictMode);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003124 frame_->EmitPush(Operand(function_info));
3125 frame_->SpillAll();
Leon Clarkee46be812010-01-19 14:06:41 +00003126 frame_->CallStub(&stub, 1);
3127 frame_->EmitPush(r0);
3128 } else {
3129 // Create a new closure.
3130 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003131 frame_->EmitPush(Operand(function_info));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003132 frame_->EmitPush(Operand(pretenure
Steve Block44f0eee2011-05-26 01:26:41 +01003133 ? FACTORY->true_value()
3134 : FACTORY->false_value()));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003135 frame_->CallRuntime(Runtime::kNewClosure, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00003136 frame_->EmitPush(r0);
3137 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003138}
3139
3140
3141void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
3142#ifdef DEBUG
3143 int original_height = frame_->height();
3144#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003145 Comment cmnt(masm_, "[ FunctionLiteral");
3146
Steve Block6ded16b2010-05-10 14:33:55 +01003147 // Build the function info and instantiate it.
3148 Handle<SharedFunctionInfo> function_info =
Ben Murdochf87a2032010-10-22 12:50:53 +01003149 Compiler::BuildFunctionInfo(node, script());
3150 if (function_info.is_null()) {
3151 SetStackOverflow();
Steve Blocka7e24c12009-10-30 11:49:00 +00003152 ASSERT(frame_->height() == original_height);
3153 return;
3154 }
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003155 InstantiateFunction(function_info, node->pretenure());
Steve Block6ded16b2010-05-10 14:33:55 +01003156 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003157}
3158
3159
Steve Block6ded16b2010-05-10 14:33:55 +01003160void CodeGenerator::VisitSharedFunctionInfoLiteral(
3161 SharedFunctionInfoLiteral* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003162#ifdef DEBUG
3163 int original_height = frame_->height();
3164#endif
Steve Block6ded16b2010-05-10 14:33:55 +01003165 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003166 InstantiateFunction(node->shared_function_info(), false);
Steve Block6ded16b2010-05-10 14:33:55 +01003167 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003168}
3169
3170
3171void CodeGenerator::VisitConditional(Conditional* node) {
3172#ifdef DEBUG
3173 int original_height = frame_->height();
3174#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003175 Comment cmnt(masm_, "[ Conditional");
3176 JumpTarget then;
3177 JumpTarget else_;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003178 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003179 if (has_valid_frame()) {
3180 Branch(false, &else_);
3181 }
3182 if (has_valid_frame() || then.is_linked()) {
3183 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003184 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003185 }
3186 if (else_.is_linked()) {
3187 JumpTarget exit;
3188 if (has_valid_frame()) exit.Jump();
3189 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003190 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003191 if (exit.is_linked()) exit.Bind();
3192 }
Steve Block6ded16b2010-05-10 14:33:55 +01003193 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003194}
3195
3196
3197void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003198 if (slot->type() == Slot::LOOKUP) {
3199 ASSERT(slot->var()->is_dynamic());
3200
Steve Block6ded16b2010-05-10 14:33:55 +01003201 // JumpTargets do not yet support merging frames so the frame must be
3202 // spilled when jumping to these targets.
Steve Blocka7e24c12009-10-30 11:49:00 +00003203 JumpTarget slow;
3204 JumpTarget done;
3205
Kristian Monsen25f61362010-05-21 11:50:48 +01003206 // Generate fast case for loading from slots that correspond to
3207 // local/global variables or arguments unless they are shadowed by
3208 // eval-introduced bindings.
3209 EmitDynamicLoadFromSlotFastCase(slot,
3210 typeof_state,
3211 &slow,
3212 &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00003213
3214 slow.Bind();
3215 frame_->EmitPush(cp);
Steve Block8defd9f2010-07-08 12:39:36 +01003216 frame_->EmitPush(Operand(slot->var()->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003217
3218 if (typeof_state == INSIDE_TYPEOF) {
3219 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3220 } else {
3221 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
3222 }
3223
3224 done.Bind();
3225 frame_->EmitPush(r0);
3226
3227 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003228 Register scratch = VirtualFrame::scratch0();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003229 TypeInfo info = type_info(slot);
3230 frame_->EmitPush(SlotOperand(slot, scratch), info);
Steve Block8defd9f2010-07-08 12:39:36 +01003231
Steve Blocka7e24c12009-10-30 11:49:00 +00003232 if (slot->var()->mode() == Variable::CONST) {
3233 // Const slots may contain 'the hole' value (the constant hasn't been
3234 // initialized yet) which needs to be converted into the 'undefined'
3235 // value.
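      // Editor's illustration (assumption, not in the original source): in
      //   function f() { var v = c; const c = 1; return v; }
      // the read of c happens before its initialization, so the slot still
      // holds the hole and the load must produce 'undefined'.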
3236 Comment cmnt(masm_, "[ Unhole const");
Steve Block8defd9f2010-07-08 12:39:36 +01003237 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003238 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003239 __ cmp(tos, ip);
3240 __ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);
3241 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00003242 }
3243 }
3244}
3245
3246
Steve Block6ded16b2010-05-10 14:33:55 +01003247void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
3248 TypeofState state) {
Steve Block8defd9f2010-07-08 12:39:36 +01003249 VirtualFrame::RegisterAllocationScope scope(this);
Steve Block6ded16b2010-05-10 14:33:55 +01003250 LoadFromSlot(slot, state);
3251
3252 // Bail out quickly if we're not using lazy arguments allocation.
3253 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
3254
3255 // ... or if the slot isn't a non-parameter arguments slot.
3256 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
3257
Steve Block8defd9f2010-07-08 12:39:36 +01003258 // Load the loaded value from the stack into a register but leave it on the
Steve Block6ded16b2010-05-10 14:33:55 +01003259 // stack.
Steve Block8defd9f2010-07-08 12:39:36 +01003260 Register tos = frame_->Peek();
Steve Block6ded16b2010-05-10 14:33:55 +01003261
3262 // If the loaded value is the sentinel that indicates that we
3263 // haven't loaded the arguments object yet, we need to do it now.
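  // Editor's note (assumption, not in the original source): with lazy
  // arguments allocation, e.g. in
  //   function f(x) { if (rare) return arguments.length; return x; }
  // the arguments slot initially holds the marker value, and the real
  // arguments object is only built on the paths that actually use it.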
3264 JumpTarget exit;
Ben Murdoch086aeea2011-05-13 15:57:08 +01003265 __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003266 __ cmp(tos, ip);
Steve Block6ded16b2010-05-10 14:33:55 +01003267 exit.Branch(ne);
3268 frame_->Drop();
3269 StoreArgumentsObject(false);
3270 exit.Bind();
3271}
3272
3273
Leon Clarkee46be812010-01-19 14:06:41 +00003274void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
3275 ASSERT(slot != NULL);
Steve Block8defd9f2010-07-08 12:39:36 +01003276 VirtualFrame::RegisterAllocationScope scope(this);
Leon Clarkee46be812010-01-19 14:06:41 +00003277 if (slot->type() == Slot::LOOKUP) {
3278 ASSERT(slot->var()->is_dynamic());
3279
3280 // For now, just do a runtime call.
3281 frame_->EmitPush(cp);
Steve Block8defd9f2010-07-08 12:39:36 +01003282 frame_->EmitPush(Operand(slot->var()->name()));
Leon Clarkee46be812010-01-19 14:06:41 +00003283
3284 if (init_state == CONST_INIT) {
3285 // Same as the case for a normal store, but ignores attribute
3286 // (e.g. READ_ONLY) of context slot so that we can initialize
3287 // const properties (introduced via eval("const foo = (some
3288 // expr);")). Also, uses the current function context instead of
3289 // the top context.
3290 //
3291 // Note that we must declare the foo upon entry of eval(), via a
3292 // context slot declaration, but we cannot initialize it at the
3293 // same time, because the const declaration may be at the end of
3294 // the eval code (sigh...) and the const variable may have been
3295 // used before (where its value is 'undefined'). Thus, we can only
3296 // do the initialization when we actually encounter the expression
3297 // and when the expression operands are defined and valid, and
3298 // thus we need the split into 2 operations: declaration of the
3299 // context slot followed by initialization.
3300 frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3301 } else {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003302 frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
3303 frame_->CallRuntime(Runtime::kStoreContextSlot, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00003304 }
3305 // Storing a variable must keep the (new) value on the expression
3306 // stack. This is necessary for compiling assignment expressions.
3307 frame_->EmitPush(r0);
3308
3309 } else {
3310 ASSERT(!slot->var()->is_dynamic());
Steve Block6ded16b2010-05-10 14:33:55 +01003311 Register scratch = VirtualFrame::scratch0();
Steve Block8defd9f2010-07-08 12:39:36 +01003312 Register scratch2 = VirtualFrame::scratch1();
Leon Clarkee46be812010-01-19 14:06:41 +00003313
Steve Block6ded16b2010-05-10 14:33:55 +01003314 // The frame must be spilled when branching to this target.
Leon Clarkee46be812010-01-19 14:06:41 +00003315 JumpTarget exit;
Steve Block6ded16b2010-05-10 14:33:55 +01003316
Leon Clarkee46be812010-01-19 14:06:41 +00003317 if (init_state == CONST_INIT) {
3318 ASSERT(slot->var()->mode() == Variable::CONST);
3319 // Only the first const initialization must be executed (the slot
3320 // still contains 'the hole' value). When the assignment is
3321 // executed, the code is identical to a normal store (see below).
3322 Comment cmnt(masm_, "[ Init const");
Steve Block6ded16b2010-05-10 14:33:55 +01003323 __ ldr(scratch, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003324 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01003325 __ cmp(scratch, ip);
Leon Clarkee46be812010-01-19 14:06:41 +00003326 exit.Branch(ne);
3327 }
3328
3329 // We must execute the store. Storing a variable must keep the
3330 // (new) value on the stack. This is necessary for compiling
3331 // assignment expressions.
3332 //
3333 // Note: We will reach here even with slot->var()->mode() ==
3334 // Variable::CONST because of const declarations which will
3335 // initialize consts to 'the hole' value and by doing so, end up
3336 // calling this code. r2 may be loaded with context; used below in
3337 // RecordWrite.
Steve Block6ded16b2010-05-10 14:33:55 +01003338 Register tos = frame_->Peek();
3339 __ str(tos, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003340 if (slot->type() == Slot::CONTEXT) {
3341 // Skip write barrier if the written value is a smi.
Steve Block6ded16b2010-05-10 14:33:55 +01003342 __ tst(tos, Operand(kSmiTagMask));
3343 // We don't use tos any more after here.
Leon Clarkee46be812010-01-19 14:06:41 +00003344 exit.Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01003345 // scratch is loaded with context when calling SlotOperand above.
Leon Clarkee46be812010-01-19 14:06:41 +00003346 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01003347 // We need an extra register. Until we have a way to do that in the
3348 // virtual frame we will cheat and ask for a free TOS register.
3349 Register scratch3 = frame_->GetTOSRegister();
3350 __ RecordWrite(scratch, Operand(offset), scratch2, scratch3);
Leon Clarkee46be812010-01-19 14:06:41 +00003351 }
3352 // If we definitely did not jump over the assignment, we do not need
3353 // to bind the exit label. Doing so can defeat peephole
3354 // optimization.
3355 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
3356 exit.Bind();
3357 }
3358 }
3359}
3360
3361
Steve Blocka7e24c12009-10-30 11:49:00 +00003362void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
3363 TypeofState typeof_state,
Steve Blocka7e24c12009-10-30 11:49:00 +00003364 JumpTarget* slow) {
3365 // Check that no extension objects have been created by calls to
3366 // eval from the current scope to the global scope.
Steve Block6ded16b2010-05-10 14:33:55 +01003367 Register tmp = frame_->scratch0();
3368 Register tmp2 = frame_->scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00003369 Register context = cp;
3370 Scope* s = scope();
3371 while (s != NULL) {
3372 if (s->num_heap_slots() > 0) {
3373 if (s->calls_eval()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003374 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003375 // Check that extension is NULL.
3376 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
3377 __ tst(tmp2, tmp2);
3378 slow->Branch(ne);
3379 }
3380 // Load next context in chain.
3381 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
3382 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3383 context = tmp;
3384 }
3385 // If no outer scope calls eval, we do not need to check more
3386 // context extensions.
3387 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
3388 s = s->outer_scope();
3389 }
3390
3391 if (s->is_eval_scope()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003392 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003393 Label next, fast;
Steve Block6ded16b2010-05-10 14:33:55 +01003394 __ Move(tmp, context);
Steve Blocka7e24c12009-10-30 11:49:00 +00003395 __ bind(&next);
3396 // Terminate at global context.
3397 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
3398 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
3399 __ cmp(tmp2, ip);
3400 __ b(eq, &fast);
3401 // Check that extension is NULL.
3402 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
3403 __ tst(tmp2, tmp2);
3404 slow->Branch(ne);
3405 // Load next context in chain.
3406 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
3407 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3408 __ b(&next);
3409 __ bind(&fast);
3410 }
3411
Steve Blocka7e24c12009-10-30 11:49:00 +00003412 // Load the global object.
3413 LoadGlobal();
Steve Block6ded16b2010-05-10 14:33:55 +01003414  // Set up the name register and call the load IC.
3415 frame_->CallLoadIC(slot->var()->name(),
3416 typeof_state == INSIDE_TYPEOF
3417 ? RelocInfo::CODE_TARGET
3418 : RelocInfo::CODE_TARGET_CONTEXT);
Steve Blocka7e24c12009-10-30 11:49:00 +00003419}
3420
3421
Kristian Monsen25f61362010-05-21 11:50:48 +01003422void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
3423 TypeofState typeof_state,
3424 JumpTarget* slow,
3425 JumpTarget* done) {
3426 // Generate fast-case code for variables that might be shadowed by
3427 // eval-introduced variables. Eval is used a lot without
3428 // introducing variables. In those cases, we do not want to
3429 // perform a runtime call for all variables in the scope
3430 // containing the eval.
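  // Editor's example (assumption, not in the original source): in
  //   function f(a) { eval(s); return a; }
  // the variable `a` might be shadowed by an eval-introduced binding;
  // the fast cases below load the argument directly when the context
  // chain has no extensions, avoiding a runtime call.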
3431 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
3432 LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
3433 frame_->SpillAll();
3434 done->Jump();
3435
3436 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
3437 frame_->SpillAll();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003438 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Kristian Monsen25f61362010-05-21 11:50:48 +01003439 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
3440 if (potential_slot != NULL) {
3441 // Generate fast case for locals that rewrite to slots.
3442 __ ldr(r0,
3443 ContextSlotOperandCheckExtensions(potential_slot,
3444 r1,
3445 r2,
3446 slow));
3447 if (potential_slot->var()->mode() == Variable::CONST) {
3448 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3449 __ cmp(r0, ip);
3450 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
3451 }
3452 done->Jump();
3453 } else if (rewrite != NULL) {
3454 // Generate fast case for argument loads.
3455 Property* property = rewrite->AsProperty();
3456 if (property != NULL) {
3457 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
3458 Literal* key_literal = property->key()->AsLiteral();
3459 if (obj_proxy != NULL &&
3460 key_literal != NULL &&
3461 obj_proxy->IsArguments() &&
3462 key_literal->handle()->IsSmi()) {
3463 // Load arguments object if there are no eval-introduced
3464 // variables. Then load the argument from the arguments
3465 // object using keyed load.
3466 __ ldr(r0,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003467 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01003468 r1,
3469 r2,
3470 slow));
3471 frame_->EmitPush(r0);
3472 __ mov(r1, Operand(key_literal->handle()));
3473 frame_->EmitPush(r1);
3474 EmitKeyedLoad();
3475 done->Jump();
3476 }
3477 }
3478 }
3479 }
3480}
3481
3482
Steve Blocka7e24c12009-10-30 11:49:00 +00003483void CodeGenerator::VisitSlot(Slot* node) {
3484#ifdef DEBUG
3485 int original_height = frame_->height();
3486#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003487 Comment cmnt(masm_, "[ Slot");
Steve Block6ded16b2010-05-10 14:33:55 +01003488 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
3489 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003490}
3491
3492
3493void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
3494#ifdef DEBUG
3495 int original_height = frame_->height();
3496#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003497 Comment cmnt(masm_, "[ VariableProxy");
3498
3499 Variable* var = node->var();
3500 Expression* expr = var->rewrite();
3501 if (expr != NULL) {
3502 Visit(expr);
3503 } else {
3504 ASSERT(var->is_global());
3505 Reference ref(this, node);
Steve Block6ded16b2010-05-10 14:33:55 +01003506 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003507 }
Steve Block6ded16b2010-05-10 14:33:55 +01003508 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003509}
3510
3511
3512void CodeGenerator::VisitLiteral(Literal* node) {
3513#ifdef DEBUG
3514 int original_height = frame_->height();
3515#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003516 Comment cmnt(masm_, "[ Literal");
Steve Block6ded16b2010-05-10 14:33:55 +01003517 Register reg = frame_->GetTOSRegister();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003518 bool is_smi = node->handle()->IsSmi();
Steve Block6ded16b2010-05-10 14:33:55 +01003519 __ mov(reg, Operand(node->handle()));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003520 frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
Steve Block6ded16b2010-05-10 14:33:55 +01003521 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003522}
3523
3524
3525void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
3526#ifdef DEBUG
3527 int original_height = frame_->height();
3528#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003529  Comment cmnt(masm_, "[ RegExp Literal");
3530
Steve Block8defd9f2010-07-08 12:39:36 +01003531 Register tmp = VirtualFrame::scratch0();
3532 // Free up a TOS register that can be used to push the literal.
3533 Register literal = frame_->GetTOSRegister();
3534
Steve Blocka7e24c12009-10-30 11:49:00 +00003535 // Retrieve the literal array and check the allocated entry.
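  // Editor's note (assumption, not in the original source): for a literal
  // such as /ab+c/g the boilerplate JSRegExp is materialized once via
  // Runtime::kMaterializeRegExpLiteral and cached in the literals array;
  // later evaluations find it there and only clone it via the field copy
  // at the end of this function.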
3536
3537 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003538 __ ldr(tmp, frame_->Function());
Steve Blocka7e24c12009-10-30 11:49:00 +00003539
3540 // Load the literals array of the function.
Steve Block8defd9f2010-07-08 12:39:36 +01003541 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003542
3543 // Load the literal at the ast saved index.
3544 int literal_offset =
3545 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01003546 __ ldr(literal, FieldMemOperand(tmp, literal_offset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003547
Ben Murdochbb769b22010-08-11 14:56:33 +01003548 JumpTarget materialized;
Steve Blocka7e24c12009-10-30 11:49:00 +00003549 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003550 __ cmp(literal, ip);
3551  // This branch locks the virtual frame at the materialized label to
3552  // match the one we have here, where the literal register is not on
3553  // the stack and nothing is spilled.
Ben Murdochbb769b22010-08-11 14:56:33 +01003554 materialized.Branch(ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00003555
Steve Block8defd9f2010-07-08 12:39:36 +01003556  // If the entry is undefined, we call the runtime system to compute
Steve Blocka7e24c12009-10-30 11:49:00 +00003557 // the literal.
Steve Block8defd9f2010-07-08 12:39:36 +01003558 // literal array (0)
3559 frame_->EmitPush(tmp);
3560 // literal index (1)
3561 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3562 // RegExp pattern (2)
3563 frame_->EmitPush(Operand(node->pattern()));
3564 // RegExp flags (3)
3565 frame_->EmitPush(Operand(node->flags()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003566 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
Steve Block8defd9f2010-07-08 12:39:36 +01003567 __ Move(literal, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003568
Ben Murdochbb769b22010-08-11 14:56:33 +01003569 materialized.Bind();
3570
Steve Block8defd9f2010-07-08 12:39:36 +01003571 frame_->EmitPush(literal);
Ben Murdochbb769b22010-08-11 14:56:33 +01003572 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3573 frame_->EmitPush(Operand(Smi::FromInt(size)));
3574 frame_->CallRuntime(Runtime::kAllocateInNewSpace, 1);
3575 // TODO(lrn): Use AllocateInNewSpace macro with fallback to runtime.
3576 // r0 is newly allocated space.
3577
3578 // Reuse literal variable with (possibly) a new register, still holding
3579 // the materialized boilerplate.
3580 literal = frame_->PopToRegister(r0);
3581
3582 __ CopyFields(r0, literal, tmp.bit(), size / kPointerSize);
3583
3584 // Push the clone.
3585 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003586 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003587}
3588
3589
Steve Blocka7e24c12009-10-30 11:49:00 +00003590void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
3591#ifdef DEBUG
3592 int original_height = frame_->height();
3593#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003594 Comment cmnt(masm_, "[ ObjectLiteral");
3595
Steve Block8defd9f2010-07-08 12:39:36 +01003596 Register literal = frame_->GetTOSRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003597 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003598 __ ldr(literal, frame_->Function());
Leon Clarkee46be812010-01-19 14:06:41 +00003599 // Literal array.
Steve Block8defd9f2010-07-08 12:39:36 +01003600 __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
3601 frame_->EmitPush(literal);
Leon Clarkee46be812010-01-19 14:06:41 +00003602 // Literal index.
Steve Block8defd9f2010-07-08 12:39:36 +01003603 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
Leon Clarkee46be812010-01-19 14:06:41 +00003604 // Constant properties.
Steve Block8defd9f2010-07-08 12:39:36 +01003605 frame_->EmitPush(Operand(node->constant_properties()));
Steve Block6ded16b2010-05-10 14:33:55 +01003606 // Should the object literal have fast elements?
Steve Block8defd9f2010-07-08 12:39:36 +01003607 frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
Leon Clarkee46be812010-01-19 14:06:41 +00003608 if (node->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01003609 frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00003610 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003611 frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003612 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003613 frame_->EmitPush(r0); // save the result
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003614
3615 // Mark all computed expressions that are bound to a key that
3616 // is shadowed by a later occurrence of the same key. For the
3617 // marked expressions, no store code is emitted.
3618 node->CalculateEmitStore();
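  // Editor's example (not in the original source): in
  //   var o = { x: f(), x: g() };
  // both f() and g() are evaluated in order, but only the result of g()
  // is stored under "x"; the first property gets emit_store() == false
  // and its value is simply dropped below.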
3619
Steve Blocka7e24c12009-10-30 11:49:00 +00003620 for (int i = 0; i < node->properties()->length(); i++) {
Andrei Popescu402d9372010-02-26 13:31:12 +00003621 // At the start of each iteration, the top of stack contains
3622 // the newly created object literal.
Steve Blocka7e24c12009-10-30 11:49:00 +00003623 ObjectLiteral::Property* property = node->properties()->at(i);
3624 Literal* key = property->key();
3625 Expression* value = property->value();
3626 switch (property->kind()) {
3627 case ObjectLiteral::Property::CONSTANT:
3628 break;
3629 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
3630 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
3631 // else fall through
Andrei Popescu402d9372010-02-26 13:31:12 +00003632 case ObjectLiteral::Property::COMPUTED:
3633 if (key->handle()->IsSymbol()) {
Steve Block44f0eee2011-05-26 01:26:41 +01003634 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
3635 Builtins::kStoreIC_Initialize));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003636 Load(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003637 if (property->emit_store()) {
3638 frame_->PopToR0();
3639 // Fetch the object literal.
3640 frame_->SpillAllButCopyTOSToR1();
3641 __ mov(r2, Operand(key->handle()));
3642 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
3643 } else {
3644 frame_->Drop();
3645 }
Andrei Popescu402d9372010-02-26 13:31:12 +00003646 break;
3647 }
3648 // else fall through
Steve Blocka7e24c12009-10-30 11:49:00 +00003649 case ObjectLiteral::Property::PROTOTYPE: {
Steve Block8defd9f2010-07-08 12:39:36 +01003650 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003651 Load(key);
3652 Load(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003653 if (property->emit_store()) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003654 frame_->EmitPush(Operand(Smi::FromInt(NONE))); // PropertyAttributes
3655 frame_->CallRuntime(Runtime::kSetProperty, 4);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003656 } else {
3657 frame_->Drop(3);
3658 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003659 break;
3660 }
3661 case ObjectLiteral::Property::SETTER: {
Steve Block8defd9f2010-07-08 12:39:36 +01003662 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003663 Load(key);
Steve Block8defd9f2010-07-08 12:39:36 +01003664 frame_->EmitPush(Operand(Smi::FromInt(1)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003665 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003666 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003667 break;
3668 }
3669 case ObjectLiteral::Property::GETTER: {
Steve Block8defd9f2010-07-08 12:39:36 +01003670 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003671 Load(key);
Steve Block8defd9f2010-07-08 12:39:36 +01003672 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003673 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003674 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003675 break;
3676 }
3677 }
3678 }
Steve Block6ded16b2010-05-10 14:33:55 +01003679 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003680}
3681
3682
Steve Blocka7e24c12009-10-30 11:49:00 +00003683void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
3684#ifdef DEBUG
3685 int original_height = frame_->height();
3686#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003687 Comment cmnt(masm_, "[ ArrayLiteral");
3688
Steve Block8defd9f2010-07-08 12:39:36 +01003689 Register tos = frame_->GetTOSRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003690 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003691 __ ldr(tos, frame_->Function());
Andrei Popescu402d9372010-02-26 13:31:12 +00003692 // Load the literals array of the function.
Steve Block8defd9f2010-07-08 12:39:36 +01003693 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
3694 frame_->EmitPush(tos);
3695 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3696 frame_->EmitPush(Operand(node->constant_elements()));
Andrei Popescu402d9372010-02-26 13:31:12 +00003697 int length = node->values()->length();
Steve Block44f0eee2011-05-26 01:26:41 +01003698 if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
Iain Merrick75681382010-08-19 15:07:18 +01003699 FastCloneShallowArrayStub stub(
3700 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
3701 frame_->CallStub(&stub, 3);
Steve Block44f0eee2011-05-26 01:26:41 +01003702 __ IncrementCounter(masm_->isolate()->counters()->cow_arrays_created_stub(),
3703 1, r1, r2);
Iain Merrick75681382010-08-19 15:07:18 +01003704 } else if (node->depth() > 1) {
Leon Clarkee46be812010-01-19 14:06:41 +00003705 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
Iain Merrick75681382010-08-19 15:07:18 +01003706 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00003707 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00003708 } else {
Iain Merrick75681382010-08-19 15:07:18 +01003709 FastCloneShallowArrayStub stub(
3710 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
Andrei Popescu402d9372010-02-26 13:31:12 +00003711 frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00003712 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003713 frame_->EmitPush(r0); // save the result
Leon Clarkee46be812010-01-19 14:06:41 +00003714 // r0: created object literal
Steve Blocka7e24c12009-10-30 11:49:00 +00003715
3716 // Generate code to set the elements in the array that are not
3717 // literals.
3718 for (int i = 0; i < node->values()->length(); i++) {
3719 Expression* value = node->values()->at(i);
3720
3721 // If value is a literal the property value is already set in the
3722 // boilerplate object.
3723 if (value->AsLiteral() != NULL) continue;
3724 // If value is a materialized literal the property value is already set
3725 // in the boilerplate object if it is simple.
3726 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
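    // Editor's example (not in the original source): for [1, 2, g()] the
    // constants 1 and 2 are already present in the boilerplate, so only
    // the g() element needs a store generated here.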
3727
3728 // The property must be set by generated code.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003729 Load(value);
Steve Block8defd9f2010-07-08 12:39:36 +01003730 frame_->PopToR0();
Steve Blocka7e24c12009-10-30 11:49:00 +00003731 // Fetch the object literal.
Steve Block8defd9f2010-07-08 12:39:36 +01003732 frame_->SpillAllButCopyTOSToR1();
3733
Steve Blocka7e24c12009-10-30 11:49:00 +00003734 // Get the elements array.
3735 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
3736
3737 // Write to the indexed properties array.
3738 int offset = i * kPointerSize + FixedArray::kHeaderSize;
3739 __ str(r0, FieldMemOperand(r1, offset));
3740
3741 // Update the write barrier for the array address.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003742 __ RecordWrite(r1, Operand(offset), r3, r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00003743 }
Steve Block6ded16b2010-05-10 14:33:55 +01003744 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003745}
3746
3747
3748void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
3749#ifdef DEBUG
3750 int original_height = frame_->height();
3751#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003752 // Call runtime routine to allocate the catch extension object and
3753 // assign the exception value to the catch variable.
3754 Comment cmnt(masm_, "[ CatchExtensionObject");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003755 Load(node->key());
3756 Load(node->value());
Steve Blocka7e24c12009-10-30 11:49:00 +00003757 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
3758 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003759 ASSERT_EQ(original_height + 1, frame_->height());
3760}
3761
3762
3763void CodeGenerator::EmitSlotAssignment(Assignment* node) {
3764#ifdef DEBUG
3765 int original_height = frame_->height();
3766#endif
3767 Comment cmnt(masm(), "[ Variable Assignment");
3768 Variable* var = node->target()->AsVariableProxy()->AsVariable();
3769 ASSERT(var != NULL);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003770 Slot* slot = var->AsSlot();
Steve Block6ded16b2010-05-10 14:33:55 +01003771 ASSERT(slot != NULL);
3772
3773 // Evaluate the right-hand side.
3774 if (node->is_compound()) {
3775 // For a compound assignment the right-hand side is a binary operation
3776 // between the current property value and the actual right-hand side.
3777 LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
3778
3779 // Perform the binary operation.
3780 Literal* literal = node->value()->AsLiteral();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003781 bool overwrite_value = node->value()->ResultOverwriteAllowed();
Steve Block6ded16b2010-05-10 14:33:55 +01003782 if (literal != NULL && literal->handle()->IsSmi()) {
3783 SmiOperation(node->binary_op(),
3784 literal->handle(),
3785 false,
3786 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
3787 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003788 GenerateInlineSmi inline_smi =
3789 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
3790 if (literal != NULL) {
3791 ASSERT(!literal->handle()->IsSmi());
3792 inline_smi = DONT_GENERATE_INLINE_SMI;
3793 }
Steve Block6ded16b2010-05-10 14:33:55 +01003794 Load(node->value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003795 GenericBinaryOperation(node->binary_op(),
3796 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
3797 inline_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01003798 }
3799 } else {
3800 Load(node->value());
3801 }
3802
3803 // Perform the assignment.
3804 if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
3805 CodeForSourcePosition(node->position());
3806 StoreToSlot(slot,
3807 node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
3808 }
3809 ASSERT_EQ(original_height + 1, frame_->height());
3810}


void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary. If
  // the receiver is trivial it is not placed on the stack at this point, but
  // loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so this is never an assignment to a variable and there
    // must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      frame_->Dup();
    }
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame_->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else if (var != NULL) {
      LoadGlobal();
    } else {
      frame_->Dup();
    }
    EmitNamedLoad(name, var != NULL);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    // Load the receiver and swap with the value.
    Load(prop->obj());
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t0);
    frame_->EmitPush(t1);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  EmitNamedStore(name, is_contextual);
  frame_->EmitPush(r0);

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Register t0 = frame_->PopToRegister();
      Register t1 = frame_->PopToRegister(t0);
      frame_->EmitPush(t0);
      frame_->EmitPush(t1);
    }
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

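  // Record what we know about the value being stored so that EmitKeyedStore
  // can emit a cheaper write barrier when the value cannot be a new-space
  // object.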
  WriteBarrierCharacter wb_info;

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block
  //
  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame_->Dup2();
    EmitKeyedLoad();
    frame_->EmitPush(r0);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
    wb_info = node->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
    wb_info = node->value()->AsLiteral() != NULL ?
        NEVER_NEWSPACE :
        (node->value()->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI);
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  EmitKeyedStore(prop->key()->type(), wb_info);
  frame_->EmitPush(r0);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t1);
    frame_->EmitPush(t0);
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos]   : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitAssignment(Assignment* node) {
  VirtualFrame::RegisterAllocationScope scope(this);
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Assignment");

  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitThrow(Throw* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Throw");

  Load(node->exception());
  CodeForSourcePosition(node->position());
  frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitProperty(Property* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Property");

  { Reference property(this, node);
    property.GetValue();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCall(Call* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Standard function call.
  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

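  // Dispatch on the shape of the callee: a possibly-shadowed 'eval', a
  // global variable, a lookup slot, a property, or an arbitrary expression.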
4142 if (var != NULL && var->is_possibly_eval()) {
4143 // ----------------------------------
4144 // JavaScript example: 'eval(arg)' // eval is not known to be shadowed
4145 // ----------------------------------
4146
4147 // In a call to eval, we first call %ResolvePossiblyDirectEval to
4148 // resolve the function we need to call and the receiver of the
4149 // call. Then we call the resolved function using the given
4150 // arguments.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004151
Steve Blocka7e24c12009-10-30 11:49:00 +00004152 // Prepare stack for call to resolved function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004153 Load(function);
4154
4155 // Allocate a frame slot for the receiver.
Steve Block8defd9f2010-07-08 12:39:36 +01004156 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004157
4158 // Load the arguments.
Steve Blocka7e24c12009-10-30 11:49:00 +00004159 int arg_count = args->length();
4160 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004161 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004162 }
4163
Steve Block8defd9f2010-07-08 12:39:36 +01004164 VirtualFrame::SpilledScope spilled_scope(frame_);
4165
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004166 // If we know that eval can only be shadowed by eval-introduced
4167 // variables we attempt to load the global eval function directly
4168 // in generated code. If we succeed, there is no need to perform a
4169 // context lookup in the runtime system.
4170 JumpTarget done;
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004171 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
4172 ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004173 JumpTarget slow;
4174 // Prepare the stack for the call to
4175 // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
4176 // function, the first argument to the eval call and the
4177 // receiver.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004178 LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004179 NOT_INSIDE_TYPEOF,
4180 &slow);
4181 frame_->EmitPush(r0);
4182 if (arg_count > 0) {
4183 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
4184 frame_->EmitPush(r1);
4185 } else {
4186 frame_->EmitPush(r2);
4187 }
4188 __ ldr(r1, frame_->Receiver());
4189 frame_->EmitPush(r1);
4190
Steve Block1e0659c2011-05-24 12:43:12 +01004191 // Push the strict mode flag.
4192 frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
4193
4194 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004195
4196 done.Jump();
4197 slow.Bind();
4198 }
4199
4200 // Prepare the stack for the call to ResolvePossiblyDirectEval by
4201 // pushing the loaded function, the first argument to the eval
4202 // call and the receiver.
Steve Blocka7e24c12009-10-30 11:49:00 +00004203 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
4204 frame_->EmitPush(r1);
4205 if (arg_count > 0) {
4206 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
4207 frame_->EmitPush(r1);
4208 } else {
4209 frame_->EmitPush(r2);
4210 }
Leon Clarkee46be812010-01-19 14:06:41 +00004211 __ ldr(r1, frame_->Receiver());
4212 frame_->EmitPush(r1);
4213
Steve Block1e0659c2011-05-24 12:43:12 +01004214 // Push the strict mode flag.
4215 frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
4216
Steve Blocka7e24c12009-10-30 11:49:00 +00004217 // Resolve the call.
Steve Block1e0659c2011-05-24 12:43:12 +01004218 frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00004219
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004220 // If we generated fast-case code bind the jump-target where fast
4221 // and slow case merge.
4222 if (done.is_linked()) done.Bind();
4223
Steve Blocka7e24c12009-10-30 11:49:00 +00004224 // Touch up stack with the right values for the function and the receiver.
Leon Clarkee46be812010-01-19 14:06:41 +00004225 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00004226 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
4227
4228 // Call the function.
4229 CodeForSourcePosition(node->position());
4230
4231 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00004232 CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004233 frame_->CallStub(&call_function, arg_count + 1);
4234
4235 __ ldr(cp, frame_->Context());
4236 // Remove the function from the stack.
4237 frame_->Drop();
4238 frame_->EmitPush(r0);
4239
4240 } else if (var != NULL && !var->is_this() && var->is_global()) {
4241 // ----------------------------------
4242 // JavaScript example: 'foo(1, 2, 3)' // foo is global
4243 // ----------------------------------
Steve Blocka7e24c12009-10-30 11:49:00 +00004244 // Pass the global object as the receiver and let the IC stub
4245 // patch the stack to use the global proxy as 'this' in the
4246 // invoked function.
4247 LoadGlobal();
4248
4249 // Load the arguments.
4250 int arg_count = args->length();
4251 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004252 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00004253 }
4254
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004255 VirtualFrame::SpilledScope spilled_scope(frame_);
Andrei Popescu402d9372010-02-26 13:31:12 +00004256 // Setup the name register and call the IC initialization code.
4257 __ mov(r2, Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00004258 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Steve Block44f0eee2011-05-26 01:26:41 +01004259 Handle<Code> stub =
4260 ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
Steve Blocka7e24c12009-10-30 11:49:00 +00004261 CodeForSourcePosition(node->position());
4262 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
4263 arg_count + 1);
4264 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00004265 frame_->EmitPush(r0);
4266
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004267 } else if (var != NULL && var->AsSlot() != NULL &&
4268 var->AsSlot()->type() == Slot::LOOKUP) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004269 // ----------------------------------
Kristian Monsen25f61362010-05-21 11:50:48 +01004270 // JavaScript examples:
4271 //
4272 // with (obj) foo(1, 2, 3) // foo may be in obj.
4273 //
4274 // function f() {};
4275 // function g() {
4276 // eval(...);
4277 // f(); // f could be in extension object.
4278 // }
Steve Blocka7e24c12009-10-30 11:49:00 +00004279 // ----------------------------------
4280
Kristian Monsen25f61362010-05-21 11:50:48 +01004281 JumpTarget slow, done;
4282
4283 // Generate fast case for loading functions from slots that
4284 // correspond to local/global variables or arguments unless they
4285 // are shadowed by eval-introduced bindings.
Kristian Monsen0d5e1162010-09-30 15:31:59 +01004286 EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01004287 NOT_INSIDE_TYPEOF,
4288 &slow,
4289 &done);
4290
4291 slow.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00004292 // Load the function
4293 frame_->EmitPush(cp);
Iain Merrick75681382010-08-19 15:07:18 +01004294 frame_->EmitPush(Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00004295 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
4296 // r0: slot value; r1: receiver
4297
4298 // Load the receiver.
4299 frame_->EmitPush(r0); // function
4300 frame_->EmitPush(r1); // receiver
4301
Kristian Monsen25f61362010-05-21 11:50:48 +01004302 // If fast case code has been generated, emit code to push the
4303 // function and receiver and have the slow path jump around this
4304 // code.
4305 if (done.is_linked()) {
4306 JumpTarget call;
4307 call.Jump();
4308 done.Bind();
4309 frame_->EmitPush(r0); // function
Iain Merrick75681382010-08-19 15:07:18 +01004310 LoadGlobalReceiver(VirtualFrame::scratch0()); // receiver
Kristian Monsen25f61362010-05-21 11:50:48 +01004311 call.Bind();
4312 }
4313
4314 // Call the function. At this point, everything is spilled but the
4315 // function and receiver are in r0 and r1.
Leon Clarkee46be812010-01-19 14:06:41 +00004316 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004317 frame_->EmitPush(r0);
4318
4319 } else if (property != NULL) {
4320 // Check if the key is a literal string.
4321 Literal* literal = property->key()->AsLiteral();
4322
4323 if (literal != NULL && literal->handle()->IsSymbol()) {
4324 // ------------------------------------------------------------------
4325 // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
4326 // ------------------------------------------------------------------
4327
Steve Block6ded16b2010-05-10 14:33:55 +01004328 Handle<String> name = Handle<String>::cast(literal->handle());
Steve Blocka7e24c12009-10-30 11:49:00 +00004329
Steve Block6ded16b2010-05-10 14:33:55 +01004330 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
4331 name->IsEqualTo(CStrVector("apply")) &&
4332 args->length() == 2 &&
4333 args->at(1)->AsVariableProxy() != NULL &&
4334 args->at(1)->AsVariableProxy()->IsArguments()) {
4335 // Use the optimized Function.prototype.apply that avoids
4336 // allocating lazily allocated arguments objects.
4337 CallApplyLazy(property->obj(),
4338 args->at(0),
4339 args->at(1)->AsVariableProxy(),
4340 node->position());
4341
4342 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004343 Load(property->obj()); // Receiver.
Steve Block6ded16b2010-05-10 14:33:55 +01004344 // Load the arguments.
4345 int arg_count = args->length();
4346 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004347 Load(args->at(i));
Steve Block6ded16b2010-05-10 14:33:55 +01004348 }
4349
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004350 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block6ded16b2010-05-10 14:33:55 +01004351 // Set the name register and call the IC initialization code.
4352 __ mov(r2, Operand(name));
4353 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004354 Handle<Code> stub =
Steve Block44f0eee2011-05-26 01:26:41 +01004355 ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
Steve Block6ded16b2010-05-10 14:33:55 +01004356 CodeForSourcePosition(node->position());
4357 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
4358 __ ldr(cp, frame_->Context());
4359 frame_->EmitPush(r0);
4360 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004361
4362 } else {
4363 // -------------------------------------------
4364 // JavaScript example: 'array[index](1, 2, 3)'
4365 // -------------------------------------------
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004366
4367 // Load the receiver and name of the function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004368 Load(property->obj());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004369 Load(property->key());
4370
Steve Blocka7e24c12009-10-30 11:49:00 +00004371 if (property->is_synthetic()) {
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004372 EmitKeyedLoad();
4373 // Put the function below the receiver.
Leon Clarked91b9f72010-01-27 17:25:45 +00004374 // Use the global receiver.
Kristian Monsen25f61362010-05-21 11:50:48 +01004375 frame_->EmitPush(r0); // Function.
Iain Merrick75681382010-08-19 15:07:18 +01004376 LoadGlobalReceiver(VirtualFrame::scratch0());
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004377 // Call the function.
4378 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
4379 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00004380 } else {
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004381 // Swap the name of the function and the receiver on the stack to follow
4382 // the calling convention for call ICs.
4383 Register key = frame_->PopToRegister();
4384 Register receiver = frame_->PopToRegister(key);
4385 frame_->EmitPush(key);
4386 frame_->EmitPush(receiver);
4387
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004388 // Load the arguments.
4389 int arg_count = args->length();
4390 for (int i = 0; i < arg_count; i++) {
4391 Load(args->at(i));
4392 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004393
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004394 // Load the key into r2 and call the IC initialization code.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004395 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004396 Handle<Code> stub =
Steve Block44f0eee2011-05-26 01:26:41 +01004397 ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count,
4398 in_loop);
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004399 CodeForSourcePosition(node->position());
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004400 frame_->SpillAll();
4401 __ ldr(r2, frame_->ElementAt(arg_count + 1));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004402 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08004403 frame_->Drop(); // Drop the key still on the stack.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01004404 __ ldr(cp, frame_->Context());
4405 frame_->EmitPush(r0);
4406 }
Steve Blocka7e24c12009-10-30 11:49:00 +00004407 }
4408
4409 } else {
4410 // ----------------------------------
4411 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
4412 // ----------------------------------
4413
4414 // Load the function.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004415 Load(function);
4416
Steve Blocka7e24c12009-10-30 11:49:00 +00004417 // Pass the global proxy as the receiver.
Iain Merrick75681382010-08-19 15:07:18 +01004418 LoadGlobalReceiver(VirtualFrame::scratch0());
Steve Blocka7e24c12009-10-30 11:49:00 +00004419
4420 // Call the function.
Leon Clarkee46be812010-01-19 14:06:41 +00004421 CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
Steve Blocka7e24c12009-10-30 11:49:00 +00004422 frame_->EmitPush(r0);
4423 }
Steve Block6ded16b2010-05-10 14:33:55 +01004424 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00004425}


void CodeGenerator::VisitCallNew(CallNew* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  Load(node->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Spill everything from here to simplify the implementation.
  VirtualFrame::SpilledScope spilled_scope(frame_);

  // Load the argument count into r0 and the function into r1 as per
  // calling convention.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, frame_->ElementAt(arg_count));

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      Builtins::kJSConstructCall));
  frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
  frame_->EmitPush(r0);

  ASSERT_EQ(original_height + 1, frame_->height());
}


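// Returns the class name of the argument: null for smis and other non-JS
// objects, 'Function' for JS functions, 'Object' for objects whose map
// constructor is not a function, and otherwise the instance class name taken
// from the constructor's shared function info.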
void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  Register scratch = VirtualFrame::scratch0();
  JumpTarget null, function, leave, non_function_constructor;

  // Load the object into a register.
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register tos = frame_->PopToRegister();

  // If the object is a smi, we return null.
  __ tst(tos, Operand(kSmiTagMask));
  null.Branch(eq);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CompareObjectType(tos, tos, scratch, FIRST_JS_OBJECT_TYPE);
  null.Branch(lt);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ cmp(scratch, Operand(JS_FUNCTION_TYPE));
  function.Branch(eq);

  // Check if the constructor in the map is a function.
  __ ldr(tos, FieldMemOperand(tos, Map::kConstructorOffset));
  __ CompareObjectType(tos, scratch, scratch, JS_FUNCTION_TYPE);
  non_function_constructor.Branch(ne);

  // The tos register now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(tos,
         FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->EmitPush(tos);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  __ mov(tos, Operand(FACTORY->function_class_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  __ mov(tos, Operand(FACTORY->Object_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  __ LoadRoot(tos, Heap::kNullValueRootIndex);
  frame_->EmitPush(tos);

  // All done.
  leave.Bind();
}


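// Unwraps a JSValue wrapper: returns the wrapped value, or the argument
// itself if it is a smi or not a JSValue.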
void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
  Register scratch = VirtualFrame::scratch0();
  JumpTarget leave;

  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register tos = frame_->PopToRegister();  // tos contains object.
  // if (object->IsSmi()) return the object.
  __ tst(tos, Operand(kSmiTagMask));
  leave.Branch(eq);
  // It is a heap object - get map. If (!object->IsJSValue()) return the object.
  __ CompareObjectType(tos, scratch, scratch, JS_VALUE_TYPE);
  leave.Branch(ne);
  // Load the value.
  __ ldr(tos, FieldMemOperand(tos, JSValue::kValueOffset));
  leave.Bind();
  frame_->EmitPush(tos);
}


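// Stores a value into a JSValue wrapper and updates the write barrier;
// nothing is stored if the first argument is a smi or not a JSValue. The
// value is left on the frame as the result.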
4553void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004554 Register scratch1 = VirtualFrame::scratch0();
4555 Register scratch2 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00004556 JumpTarget leave;
Iain Merrick75681382010-08-19 15:07:18 +01004557
4558 ASSERT(args->length() == 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004559 Load(args->at(0)); // Load the object.
4560 Load(args->at(1)); // Load the value.
Iain Merrick75681382010-08-19 15:07:18 +01004561 Register value = frame_->PopToRegister();
4562 Register object = frame_->PopToRegister(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004563 // if (object->IsSmi()) return object.
Iain Merrick75681382010-08-19 15:07:18 +01004564 __ tst(object, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004565 leave.Branch(eq);
4566 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004567 __ CompareObjectType(object, scratch1, scratch1, JS_VALUE_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004568 leave.Branch(ne);
4569 // Store the value.
Iain Merrick75681382010-08-19 15:07:18 +01004570 __ str(value, FieldMemOperand(object, JSValue::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004571 // Update the write barrier.
Iain Merrick75681382010-08-19 15:07:18 +01004572 __ RecordWrite(object,
4573 Operand(JSValue::kValueOffset - kHeapObjectTag),
4574 scratch1,
4575 scratch2);
Steve Blocka7e24c12009-10-30 11:49:00 +00004576 // Leave.
4577 leave.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004578 frame_->EmitPush(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004579}
4580
4581
4582void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004583 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004584 Load(args->at(0));
4585 Register reg = frame_->PopToRegister();
4586 __ tst(reg, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004587 cc_reg_ = eq;
4588}
4589
4590
4591void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004592 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
4593 ASSERT_EQ(args->length(), 3);
4594#ifdef ENABLE_LOGGING_AND_PROFILING
4595 if (ShouldGenerateLog(args->at(0))) {
Leon Clarkef7060e22010-06-03 12:02:55 +01004596 Load(args->at(1));
4597 Load(args->at(2));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004598 frame_->CallRuntime(Runtime::kLog, 2);
Steve Blocka7e24c12009-10-30 11:49:00 +00004599 }
4600#endif
Leon Clarkef7060e22010-06-03 12:02:55 +01004601 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00004602}
4603
4604
4605void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004606 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004607 Load(args->at(0));
4608 Register reg = frame_->PopToRegister();
4609 __ tst(reg, Operand(kSmiTagMask | 0x80000000u));
Steve Blocka7e24c12009-10-30 11:49:00 +00004610 cc_reg_ = eq;
4611}
4612
4613
// Generates the Math.pow method.
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));

  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;
    Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get base and exponent to registers.
    Register exponent = frame_->PopToRegister();
    Register base = frame_->PopToRegister(exponent);
    Register heap_number_map = no_reg;

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    __ JumpIfNotSmi(exponent, &exponent_nonsmi);
    __ JumpIfNotSmi(base, &base_nonsmi);

    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Exponent is a smi and base is a smi. Get the smi value into vfp
    // register d1.
    __ SmiToDoubleVFPRegister(base, d1, scratch1, s0);
    __ b(&powi);

    __ bind(&base_nonsmi);
    // Exponent is a smi and base is a non-smi. Get the double value from the
    // base into vfp register d1.
    __ ObjectToDoubleVFPRegister(base, d1,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    __ bind(&powi);

    // Load 1.0 into d0.
    __ vmov(d0, 1.0);

    // Get the absolute untagged value of the exponent and use that for the
    // calculation.
    __ mov(scratch1, Operand(exponent, ASR, kSmiTagSize), SetCC);
    // Negate if negative.
    __ rsb(scratch1, scratch1, Operand(0, RelocInfo::NONE), LeaveCC, mi);
    __ vmov(d2, d0, mi);  // 1.0 needed in d2 later if exponent is negative.

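    // Square-and-multiply loop: scratch1 holds the remaining exponent bits
    // and is shifted right once per iteration until it reaches zero.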
    // Run through all the bits in the exponent. The result is accumulated in
    // d0 and d1 holds base^(2^i) for the current bit position i.
    Label more_bits;
    __ bind(&more_bits);
    __ mov(scratch1, Operand(scratch1, LSR, 1), SetCC);
    __ vmul(d0, d0, d1, cs);  // Multiply with base^(2^i) if bit is set.
    __ vmul(d1, d1, d1, ne);  // Don't bother calculating next d1 if done.
    __ b(ne, &more_bits);

    // If exponent is positive we are done.
    __ cmp(exponent, Operand(0, RelocInfo::NONE));
    __ b(ge, &allocate_return);

    // If exponent is negative result is 1/result (d2 already holds 1.0 in that
    // case). However if d0 has reached infinity this will not provide the
    // correct result, so call runtime if that is the case.
    __ mov(scratch2, Operand(0x7FF00000));
    __ mov(scratch1, Operand(0, RelocInfo::NONE));
    __ vmov(d1, scratch1, scratch2);  // Load infinity into d1.
    __ VFPCompareAndSetFlags(d0, d1);
    runtime.Branch(eq);  // d0 reached infinity.
    __ vdiv(d0, d2, d0);
    __ b(&allocate_return);

    __ bind(&exponent_nonsmi);
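    // Raising to the power 0.5 is a square root and to -0.5 its reciprocal;
    // both can be computed with vsqrt instead of calling the runtime (the
    // AVOID_NANS_AND_INFINITIES loads below bail out to the runtime for
    // those inputs).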
    // Special handling of raising to the power of -0.5 and 0.5. First check
    // that the value is a heap number and that its lower mantissa bits
    // (which are zero for both values) are zero.
    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
    __ ldr(scratch1, FieldMemOperand(exponent, HeapObject::kMapOffset));
    __ ldr(scratch2, FieldMemOperand(exponent, HeapNumber::kMantissaOffset));
    __ cmp(scratch1, heap_number_map);
    runtime.Branch(ne);
    __ tst(scratch2, scratch2);
    runtime.Branch(ne);

    // Load the higher bits (which contain the floating point exponent).
    __ ldr(scratch1, FieldMemOperand(exponent, HeapNumber::kExponentOffset));

    // Compare exponent with -0.5.
    __ cmp(scratch1, Operand(0xbfe00000));
    __ b(ne, &not_minus_half);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);

    // Convert -0 into +0 by adding +0.
    __ vmov(d2, 0.0);
    __ vadd(d0, d2, d0);
    // Load 1.0 into d2.
    __ vmov(d2, 1.0);

    // Calculate the reciprocal of the square root.
    __ vsqrt(d0, d0);
    __ vdiv(d0, d2, d0);

    __ b(&allocate_return);

    __ bind(&not_minus_half);
    // Compare exponent with 0.5.
    __ cmp(scratch1, Operand(0x3fe00000));
    runtime.Branch(ne);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);
    // Convert -0 into +0 by adding +0.
    __ vmov(d2, 0.0);
    __ vadd(d0, d2, d0);
    __ vsqrt(d0, d0);

    __ bind(&allocate_return);
    Register scratch3 = r5;
    __ AllocateHeapNumberWithValue(scratch3, d0, scratch1, scratch2,
                                   heap_number_map, runtime.entry_label());
    __ mov(base, scratch3);
    done.Jump();

    runtime.Bind();

    // Push back the arguments again for the runtime call.
    frame_->EmitPush(base);
    frame_->EmitPush(exponent);
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    __ Move(base, r0);

    done.Bind();
    frame_->EmitPush(base);
  }
}


// Generates the Math.sqrt method.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));

  if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get the value from the frame.
    Register tos = frame_->PopToRegister();

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    Register heap_number_map = r6;
    Register new_heap_number = r5;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Get the double value from the heap number into vfp register d0.
    __ ObjectToDoubleVFPRegister(tos, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    // Calculate the square root of d0 and place result in a heap number object.
    __ vsqrt(d0, d0);
    __ AllocateHeapNumberWithValue(new_heap_number,
                                   d0,
                                   scratch1, scratch2,
                                   heap_number_map,
                                   runtime.entry_label());
    __ mov(tos, Operand(new_heap_number));
    done.Jump();

    runtime.Bind();
    // Push back the argument again for the runtime call.
    frame_->EmitPush(tos);
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    __ Move(tos, r0);

    done.Bind();
    frame_->EmitPush(tos);
  }
}


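// Deferred code for String.prototype.charCodeAt. The fast case is emitted
// inline; this slow path falls back to StringCharCodeAtGenerator::GenerateSlow
// and materializes undefined (to trigger conversion) when the receiver or
// index needs conversion, or NaN when the index is out of range.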
class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ LoadRoot(result_, Heap::kUndefinedValueRootIndex);
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ LoadRoot(result_, Heap::kNanValueRootIndex);
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};


// This generates code that performs a String.prototype.charCodeAt() call
// or returns undefined in order to trigger conversion.
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  Register index = frame_->PopToRegister();
  Register object = frame_->PopToRegister(index);

  // We need two extra registers.
  Register scratch = VirtualFrame::scratch0();
  Register result = VirtualFrame::scratch1();

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object,
                                   index,
                                   scratch,
                                   result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


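// Deferred code for the char-code-to-string conversion: falls back to
// StringCharFromCodeGenerator::GenerateSlow when the inline fast case cannot
// produce the one-character string.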
class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Register result = frame_->GetTOSRegister();
  Register code = frame_->PopToRegister(result);

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code, result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


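// Deferred code for String.prototype.charAt. Mirrors DeferredStringCharCodeAt
// but produces a one-character string: smi zero is returned to trigger
// conversion, and the empty string for out-of-range indices.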
4935class DeferredStringCharAt : public DeferredCode {
4936 public:
4937 DeferredStringCharAt(Register object,
4938 Register index,
4939 Register scratch1,
4940 Register scratch2,
4941 Register result)
4942 : result_(result),
4943 char_at_generator_(object,
4944 index,
4945 scratch1,
4946 scratch2,
4947 result,
4948 &need_conversion_,
4949 &need_conversion_,
4950 &index_out_of_range_,
4951 STRING_INDEX_IS_NUMBER) {}
4952
4953 StringCharAtGenerator* fast_case_generator() {
4954 return &char_at_generator_;
4955 }
4956
4957 virtual void Generate() {
4958 VirtualFrameRuntimeCallHelper call_helper(frame_state());
4959 char_at_generator_.GenerateSlow(masm(), call_helper);
4960
4961 __ bind(&need_conversion_);
4962 // Move smi zero into the result register, which will trigger
4963 // conversion.
4964 __ mov(result_, Operand(Smi::FromInt(0)));
4965 __ jmp(exit_label());
4966
4967 __ bind(&index_out_of_range_);
4968 // When the index is out of range, the spec requires us to return
4969 // the empty string.
4970 __ LoadRoot(result_, Heap::kEmptyStringRootIndex);
4971 __ jmp(exit_label());
4972 }
4973
4974 private:
4975 Register result_;
4976
4977 Label need_conversion_;
4978 Label index_out_of_range_;
4979
4980 StringCharAtGenerator char_at_generator_;
4981};
4982
4983
4984// This generates code that performs a String.prototype.charAt() call
4985// or returns a smi in order to trigger conversion.
4986void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004987 Comment(masm_, "[ GenerateStringCharAt");
4988 ASSERT(args->length() == 2);
4989
4990 Load(args->at(0));
4991 Load(args->at(1));
4992
Iain Merrick75681382010-08-19 15:07:18 +01004993 Register index = frame_->PopToRegister();
4994 Register object = frame_->PopToRegister(index);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004995
4996 // We need three extra registers.
Iain Merrick75681382010-08-19 15:07:18 +01004997 Register scratch1 = VirtualFrame::scratch0();
4998 Register scratch2 = VirtualFrame::scratch1();
4999 // Use r6 without notifying the virtual frame.
5000 Register result = r6;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005001
5002 DeferredStringCharAt* deferred =
5003 new DeferredStringCharAt(object,
5004 index,
5005 scratch1,
5006 scratch2,
5007 result);
5008 deferred->fast_case_generator()->GenerateFast(masm_);
5009 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01005010 frame_->EmitPush(result);
5011}
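// In essence the code above implements (a sketch):
//
//   "abc".charAt(1); // fast case: "b"
//   "abc".charAt(9); // index_out_of_range_: "" per the spec
//
// A receiver that is not a string, or an index that is not a number, lands
// in need_conversion_, which answers smi zero so that the calling code
// re-enters the generic conversion path.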
5012
5013
Steve Blocka7e24c12009-10-30 11:49:00 +00005014void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005015 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005016 Load(args->at(0));
Steve Blocka7e24c12009-10-30 11:49:00 +00005017 JumpTarget answer;
5018 // We need the CC bits to come out as not_equal in the case where the
5019 // object is a smi. This can't be done with the usual test opcode so
5020 // we use XOR to get the right CC bits.
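  // A worked example of the XOR trick, assuming the standard tag values
  // kSmiTag == 0 and kSmiTagMask == 1:
  //   smi:         low bit is 0, so (x & 1) ^ 1 == 1 -> flags say ne.
  //   heap object: low bit is 1, so (x & 1) ^ 1 == 0 -> flags say eq.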
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005021 Register possible_array = frame_->PopToRegister();
5022 Register scratch = VirtualFrame::scratch0();
5023 __ and_(scratch, possible_array, Operand(kSmiTagMask));
5024 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00005025 answer.Branch(ne);
5026 // It is a heap object - get the map. Check if the object is a JS array.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005027 __ CompareObjectType(possible_array, scratch, scratch, JS_ARRAY_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00005028 answer.Bind();
5029 cc_reg_ = eq;
5030}
5031
5032
Andrei Popescu402d9372010-02-26 13:31:12 +00005033void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
Andrei Popescu402d9372010-02-26 13:31:12 +00005034 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005035 Load(args->at(0));
Andrei Popescu402d9372010-02-26 13:31:12 +00005036 JumpTarget answer;
5037 // We need the CC bits to come out as not_equal in the case where the
5038 // object is a smi. This can't be done with the usual test opcode so
5039 // we use XOR to get the right CC bits.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005040 Register possible_regexp = frame_->PopToRegister();
5041 Register scratch = VirtualFrame::scratch0();
5042 __ and_(scratch, possible_regexp, Operand(kSmiTagMask));
5043 __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
Andrei Popescu402d9372010-02-26 13:31:12 +00005044 answer.Branch(ne);
5045 // It is a heap object - get the map. Check if the object is a regexp.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005046 __ CompareObjectType(possible_regexp, scratch, scratch, JS_REGEXP_TYPE);
Andrei Popescu402d9372010-02-26 13:31:12 +00005047 answer.Bind();
5048 cc_reg_ = eq;
5049}
5050
5051
Steve Blockd0582a62009-12-15 09:54:21 +00005052void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
5053 // This generates a fast version of:
5054 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
Steve Blockd0582a62009-12-15 09:54:21 +00005055 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005056 Load(args->at(0));
5057 Register possible_object = frame_->PopToRegister();
5058 __ tst(possible_object, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00005059 false_target()->Branch(eq);
5060
5061 __ LoadRoot(ip, Heap::kNullValueRootIndex);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005062 __ cmp(possible_object, ip);
Steve Blockd0582a62009-12-15 09:54:21 +00005063 true_target()->Branch(eq);
5064
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005065 Register map_reg = VirtualFrame::scratch0();
5066 __ ldr(map_reg, FieldMemOperand(possible_object, HeapObject::kMapOffset));
Steve Blockd0582a62009-12-15 09:54:21 +00005067 // Undetectable objects behave like undefined when tested with typeof.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005068 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kBitFieldOffset));
5069 __ tst(possible_object, Operand(1 << Map::kIsUndetectable));
Leon Clarkef7060e22010-06-03 12:02:55 +01005070 false_target()->Branch(ne);
Steve Blockd0582a62009-12-15 09:54:21 +00005071
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005072 __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
5073 __ cmp(possible_object, Operand(FIRST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00005074 false_target()->Branch(lt);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005075 __ cmp(possible_object, Operand(LAST_JS_OBJECT_TYPE));
Steve Blockd0582a62009-12-15 09:54:21 +00005076 cc_reg_ = le;
5077}
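// Summarizing the fast path above (a sketch of the intrinsic's behavior):
//
//   %_IsObject({});           // true: type in [FIRST, LAST]_JS_OBJECT_TYPE
//   %_IsObject(null);         // true: accepted before the map checks
//   %_IsObject(1);            // false: smi
//   %_IsObject(function(){}); // false: JS_FUNCTION_TYPE is above the range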
5078
5079
Ben Murdoch3bec4d22010-07-22 14:51:16 +01005080void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
5081 // This generates a fast version of:
5082 // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
5083 // typeof(arg) == 'function').
5084 // It includes undetectable objects (as opposed to IsObject).
5085 ASSERT(args->length() == 1);
5086 Load(args->at(0));
5087 Register value = frame_->PopToRegister();
5088 __ tst(value, Operand(kSmiTagMask));
5089 false_target()->Branch(eq);
5090 // Check that this is an object.
5091 __ ldr(value, FieldMemOperand(value, HeapObject::kMapOffset));
5092 __ ldrb(value, FieldMemOperand(value, Map::kInstanceTypeOffset));
5093 __ cmp(value, Operand(FIRST_JS_OBJECT_TYPE));
5094 cc_reg_ = ge;
5095}
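// Unlike GenerateIsObject above, this single 'ge' comparison also accepts
// instance types above LAST_JS_OBJECT_TYPE, notably JS_FUNCTION_TYPE, and it
// deliberately skips the undetectable-bit test, matching the comment at the
// top of the function.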
5096
5097
Iain Merrick75681382010-08-19 15:07:18 +01005098// Deferred code to check whether a String wrapper object is safe to use the
5099// default valueOf. This code is called after the bit caching this information
5100// in the map has been checked against the map of the object, which is in the
5101// map_result_ register. On return the register map_result_ contains 1 for
5102// true and 0 for false.
5103class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
5104 public:
5105 DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
5106 Register map_result,
5107 Register scratch1,
5108 Register scratch2)
5109 : object_(object),
5110 map_result_(map_result),
5111 scratch1_(scratch1),
5112 scratch2_(scratch2) { }
5113
5114 virtual void Generate() {
5115 Label false_result;
5116
5117 // Check that map is loaded as expected.
5118 if (FLAG_debug_code) {
5119 __ ldr(ip, FieldMemOperand(object_, HeapObject::kMapOffset));
5120 __ cmp(map_result_, ip);
5121 __ Assert(eq, "Map not in expected register");
5122 }
5123
5124 // Check for fast case object. Generate false result for slow case object.
5125 __ ldr(scratch1_, FieldMemOperand(object_, JSObject::kPropertiesOffset));
5126 __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
5127 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
5128 __ cmp(scratch1_, ip);
5129 __ b(eq, &false_result);
5130
5131 // Look for valueOf symbol in the descriptor array, and indicate false if
5132 // found. The type is not checked, so if it is a transition it is a false
5133 // negative.
5134 __ ldr(map_result_,
5135 FieldMemOperand(map_result_, Map::kInstanceDescriptorsOffset));
5136 __ ldr(scratch2_, FieldMemOperand(map_result_, FixedArray::kLengthOffset));
5137 // map_result_: descriptor array
5138 // scratch2_: length of descriptor array
5139 // Calculate the end of the descriptor array.
5140 STATIC_ASSERT(kSmiTag == 0);
5141 STATIC_ASSERT(kSmiTagSize == 1);
5142 STATIC_ASSERT(kPointerSize == 4);
5143 __ add(scratch1_,
5144 map_result_,
5145 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5146 __ add(scratch1_,
5147 scratch1_,
5148 Operand(scratch2_, LSL, kPointerSizeLog2 - kSmiTagSize));
5149
5150 // Calculate location of the first key name.
5151 __ add(map_result_,
5152 map_result_,
5153 Operand(FixedArray::kHeaderSize - kHeapObjectTag +
5154 DescriptorArray::kFirstIndex * kPointerSize));
5155 // Loop through all the keys in the descriptor array. If one of these is the
5156 // symbol valueOf, the result is false.
5157 Label entry, loop;
5158 // The use of ip to store the valueOf symbol assumes that it is not otherwise
5159 // used in the loop below.
Steve Block44f0eee2011-05-26 01:26:41 +01005160 __ mov(ip, Operand(FACTORY->value_of_symbol()));
Iain Merrick75681382010-08-19 15:07:18 +01005161 __ jmp(&entry);
5162 __ bind(&loop);
5163 __ ldr(scratch2_, MemOperand(map_result_, 0));
5164 __ cmp(scratch2_, ip);
5165 __ b(eq, &false_result);
5166 __ add(map_result_, map_result_, Operand(kPointerSize));
5167 __ bind(&entry);
5168 __ cmp(map_result_, Operand(scratch1_));
5169 __ b(ne, &loop);
5170
5171 // Reload map as register map_result_ was used as temporary above.
5172 __ ldr(map_result_, FieldMemOperand(object_, HeapObject::kMapOffset));
5173
5174 // If a valueOf property is not found on the object, check that its
5175 // prototype is the unmodified String prototype. If not, the result is false.
5176 __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kPrototypeOffset));
5177 __ tst(scratch1_, Operand(kSmiTagMask));
5178 __ b(eq, &false_result);
5179 __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
5180 __ ldr(scratch2_,
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005181 ContextOperand(cp, Context::GLOBAL_INDEX));
Iain Merrick75681382010-08-19 15:07:18 +01005182 __ ldr(scratch2_,
5183 FieldMemOperand(scratch2_, GlobalObject::kGlobalContextOffset));
5184 __ ldr(scratch2_,
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005185 ContextOperand(
Iain Merrick75681382010-08-19 15:07:18 +01005186 scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
5187 __ cmp(scratch1_, scratch2_);
5188 __ b(ne, &false_result);
5189
5190 // Set the bit in the map to indicate that it has been checked safe for
5191 // default valueOf, and set the result to true.
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005192 __ ldrb(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
Iain Merrick75681382010-08-19 15:07:18 +01005193 __ orr(scratch1_,
5194 scratch1_,
5195 Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005196 __ strb(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
Iain Merrick75681382010-08-19 15:07:18 +01005197 __ mov(map_result_, Operand(1));
5198 __ jmp(exit_label());
5199 __ bind(&false_result);
5200 // Set false result.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005201 __ mov(map_result_, Operand(0, RelocInfo::NONE));
Iain Merrick75681382010-08-19 15:07:18 +01005202 }
5203
5204 private:
5205 Register object_;
5206 Register map_result_;
5207 Register scratch1_;
5208 Register scratch2_;
5209};
5210
5211
5212void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
5213 ZoneList<Expression*>* args) {
5214 ASSERT(args->length() == 1);
5215 Load(args->at(0));
5216 Register obj = frame_->PopToRegister(); // Pop the string wrapper.
5217 if (FLAG_debug_code) {
5218 __ AbortIfSmi(obj);
5219 }
5220
5221 // Check whether this map has already been checked to be safe for default
5222 // valueOf.
5223 Register map_result = VirtualFrame::scratch0();
5224 __ ldr(map_result, FieldMemOperand(obj, HeapObject::kMapOffset));
5225 __ ldrb(ip, FieldMemOperand(map_result, Map::kBitField2Offset));
5226 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
5227 true_target()->Branch(ne);
5228
5229 // We need an additional two scratch registers for the deferred code.
5230 Register scratch1 = VirtualFrame::scratch1();
5231 // Use r6 without notifying the virtual frame.
5232 Register scratch2 = r6;
5233
5234 DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
5235 new DeferredIsStringWrapperSafeForDefaultValueOf(
5236 obj, map_result, scratch1, scratch2);
5237 deferred->Branch(eq);
5238 deferred->BindExit();
5239 __ tst(map_result, Operand(map_result));
5240 cc_reg_ = ne;
5241}
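// A sketch of why the cached bit matters (hypothetical example):
//
//   var s = new String("12");
//   s + 1; // ToPrimitive(s) may use the fast default valueOf
//
// The fast path is only valid while neither the wrapper nor the unmodified
// String prototype defines its own valueOf; the map bit checked above caches
// the outcome of that (expensive) descriptor-array search.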
5242
5243
Steve Blockd0582a62009-12-15 09:54:21 +00005244void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
5245 // This generates a fast version of:
5246 // (%_ClassOf(arg) === 'Function')
Steve Blockd0582a62009-12-15 09:54:21 +00005247 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005248 Load(args->at(0));
5249 Register possible_function = frame_->PopToRegister();
5250 __ tst(possible_function, Operand(kSmiTagMask));
Steve Blockd0582a62009-12-15 09:54:21 +00005251 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005252 Register map_reg = VirtualFrame::scratch0();
5253 Register scratch = VirtualFrame::scratch1();
5254 __ CompareObjectType(possible_function, map_reg, scratch, JS_FUNCTION_TYPE);
Steve Blockd0582a62009-12-15 09:54:21 +00005255 cc_reg_ = eq;
5256}
5257
5258
Leon Clarked91b9f72010-01-27 17:25:45 +00005259void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
Leon Clarked91b9f72010-01-27 17:25:45 +00005260 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005261 Load(args->at(0));
5262 Register possible_undetectable = frame_->PopToRegister();
5263 __ tst(possible_undetectable, Operand(kSmiTagMask));
Leon Clarked91b9f72010-01-27 17:25:45 +00005264 false_target()->Branch(eq);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005265 Register scratch = VirtualFrame::scratch0();
5266 __ ldr(scratch,
5267 FieldMemOperand(possible_undetectable, HeapObject::kMapOffset));
5268 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
5269 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
Leon Clarked91b9f72010-01-27 17:25:45 +00005270 cc_reg_ = ne;
5271}
5272
5273
Steve Blocka7e24c12009-10-30 11:49:00 +00005274void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005275 ASSERT(args->length() == 0);
5276
Leon Clarkef7060e22010-06-03 12:02:55 +01005277 Register scratch0 = VirtualFrame::scratch0();
5278 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00005279 // Get the frame pointer for the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005280 __ ldr(scratch0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00005281
5282 // Skip the arguments adaptor frame if it exists.
Leon Clarkef7060e22010-06-03 12:02:55 +01005283 __ ldr(scratch1,
5284 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
5285 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5286 __ ldr(scratch0,
5287 MemOperand(scratch0, StandardFrameConstants::kCallerFPOffset), eq);
Steve Blocka7e24c12009-10-30 11:49:00 +00005288
5289 // Check the marker in the calling frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005290 __ ldr(scratch1,
5291 MemOperand(scratch0, StandardFrameConstants::kMarkerOffset));
5292 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
Steve Blocka7e24c12009-10-30 11:49:00 +00005293 cc_reg_ = eq;
5294}
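// The frame walk above, sketched:
//
//   fp -> caller frame (stepping over one arguments adaptor frame, if any)
//   caller frame marker == Smi(StackFrame::CONSTRUCT) iff the enclosing
//   function was invoked with 'new'.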
5295
5296
5297void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005298 ASSERT(args->length() == 0);
5299
Leon Clarkef7060e22010-06-03 12:02:55 +01005300 Register tos = frame_->GetTOSRegister();
5301 Register scratch0 = VirtualFrame::scratch0();
5302 Register scratch1 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00005303
Steve Block6ded16b2010-05-10 14:33:55 +01005304 // Check if the calling frame is an arguments adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005305 __ ldr(scratch0,
5306 MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
5307 __ ldr(scratch1,
5308 MemOperand(scratch0, StandardFrameConstants::kContextOffset));
5309 __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
5310
5311 // Get the number of formal parameters.
5312 __ mov(tos, Operand(Smi::FromInt(scope()->num_parameters())), LeaveCC, ne);
Steve Block6ded16b2010-05-10 14:33:55 +01005313
5314 // Arguments adaptor case: Read the arguments length from the
5315 // adaptor frame.
Leon Clarkef7060e22010-06-03 12:02:55 +01005316 __ ldr(tos,
5317 MemOperand(scratch0, ArgumentsAdaptorFrameConstants::kLengthOffset),
5318 eq);
Steve Block6ded16b2010-05-10 14:33:55 +01005319
Leon Clarkef7060e22010-06-03 12:02:55 +01005320 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00005321}
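// Note that both arms of the adaptor-frame check above are emitted as
// straight-line code using ARM conditional execution: the mov of the formal
// parameter count runs only under 'ne', and the ldr of the adaptor frame's
// length only under 'eq', so no branch is needed.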
5322
5323
Steve Block6ded16b2010-05-10 14:33:55 +01005324void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005325 ASSERT(args->length() == 1);
5326
5327 // Satisfy contract with ArgumentsAccessStub:
5328 // Load the key into r1 and the formal parameters count into r0.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005329 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01005330 frame_->PopToR1();
5331 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005332 __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
Steve Blocka7e24c12009-10-30 11:49:00 +00005333
5334 // Call the shared stub to get to arguments[key].
5335 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
5336 frame_->CallStub(&stub, 0);
5337 frame_->EmitPush(r0);
5338}
5339
5340
Steve Block6ded16b2010-05-10 14:33:55 +01005341void CodeGenerator::GenerateRandomHeapNumber(
5342 ZoneList<Expression*>* args) {
5343 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00005344 ASSERT(args->length() == 0);
Steve Block6ded16b2010-05-10 14:33:55 +01005345
5346 Label slow_allocate_heapnumber;
5347 Label heapnumber_allocated;
5348
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01005349 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
5350 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
Steve Block6ded16b2010-05-10 14:33:55 +01005351 __ jmp(&heapnumber_allocated);
5352
5353 __ bind(&slow_allocate_heapnumber);
Ben Murdoch3bec4d22010-07-22 14:51:16 +01005354 // Allocate a heap number.
5355 __ CallRuntime(Runtime::kNumberAlloc, 0);
Steve Block6ded16b2010-05-10 14:33:55 +01005356 __ mov(r4, Operand(r0));
5357
5358 __ bind(&heapnumber_allocated);
5359
5360 // Convert 32 random bits in r0 to 0.(32 random bits) in a double
5361 // by computing:
5362 // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
Steve Block44f0eee2011-05-26 01:26:41 +01005363 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
Steve Block6ded16b2010-05-10 14:33:55 +01005364 __ PrepareCallCFunction(0, r1);
Steve Block44f0eee2011-05-26 01:26:41 +01005365 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0);
Steve Block6ded16b2010-05-10 14:33:55 +01005366
5367 CpuFeatures::Scope scope(VFP3);
5368 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
5369 // Create this constant using mov/orr to avoid PC relative load.
5370 __ mov(r1, Operand(0x41000000));
5371 __ orr(r1, r1, Operand(0x300000));
5372 // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
5373 __ vmov(d7, r0, r1);
5374 // Move 0x4130000000000000 to VFP.
Iain Merrick9ac36c92010-09-13 15:29:50 +01005375 __ mov(r0, Operand(0, RelocInfo::NONE));
Steve Block6ded16b2010-05-10 14:33:55 +01005376 __ vmov(d8, r0, r1);
5377 // Subtract and store the result in the heap number.
5378 __ vsub(d7, d7, d8);
5379 __ sub(r0, r4, Operand(kHeapObjectTag));
5380 __ vstr(d7, r0, HeapNumber::kValueOffset);
5381 frame_->EmitPush(r4);
5382 } else {
5383 __ mov(r0, Operand(r4));
5384 __ PrepareCallCFunction(1, r1);
5385 __ CallCFunction(
Steve Block44f0eee2011-05-26 01:26:41 +01005386 ExternalReference::fill_heap_number_with_random_function(isolate()), 1);
Steve Block6ded16b2010-05-10 14:33:55 +01005387 frame_->EmitPush(r0);
5388 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005389}
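// A worked check of the double trick above, assuming IEEE 754: the bit
// pattern 0x41300000:xxxxxxxx has unbiased exponent 20 and mantissa
// 0x00000:xxxxxxxx, i.e. the value 2^20 * (1 + x * 2^-52) = 2^20 + x * 2^-32.
// Subtracting 1.0 * 2^20 therefore leaves x * 2^-32, a uniformly distributed
// double in [0, 1).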
5390
5391
Steve Blockd0582a62009-12-15 09:54:21 +00005392void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
5393 ASSERT_EQ(2, args->length());
5394
5395 Load(args->at(0));
5396 Load(args->at(1));
5397
Andrei Popescu31002712010-02-23 13:46:05 +00005398 StringAddStub stub(NO_STRING_ADD_FLAGS);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005399 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005400 frame_->CallStub(&stub, 2);
Steve Blockd0582a62009-12-15 09:54:21 +00005401 frame_->EmitPush(r0);
5402}
5403
5404
Leon Clarkee46be812010-01-19 14:06:41 +00005405void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
5406 ASSERT_EQ(3, args->length());
5407
5408 Load(args->at(0));
5409 Load(args->at(1));
5410 Load(args->at(2));
5411
Andrei Popescu31002712010-02-23 13:46:05 +00005412 SubStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005413 frame_->SpillAll();
Andrei Popescu31002712010-02-23 13:46:05 +00005414 frame_->CallStub(&stub, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00005415 frame_->EmitPush(r0);
5416}
5417
5418
5419void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
5420 ASSERT_EQ(2, args->length());
5421
5422 Load(args->at(0));
5423 Load(args->at(1));
5424
Leon Clarked91b9f72010-01-27 17:25:45 +00005425 StringCompareStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005426 frame_->SpillAll();
Leon Clarked91b9f72010-01-27 17:25:45 +00005427 frame_->CallStub(&stub, 2);
Leon Clarkee46be812010-01-19 14:06:41 +00005428 frame_->EmitPush(r0);
5429}
5430
5431
5432void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
5433 ASSERT_EQ(4, args->length());
5434
5435 Load(args->at(0));
5436 Load(args->at(1));
5437 Load(args->at(2));
5438 Load(args->at(3));
Steve Block6ded16b2010-05-10 14:33:55 +01005439 RegExpExecStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005440 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01005441 frame_->CallStub(&stub, 4);
5442 frame_->EmitPush(r0);
5443}
Leon Clarkee46be812010-01-19 14:06:41 +00005444
Steve Block6ded16b2010-05-10 14:33:55 +01005445
5446void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
Steve Block6ded16b2010-05-10 14:33:55 +01005447 ASSERT_EQ(3, args->length());
Ben Murdochb0fe1622011-05-05 13:52:32 +01005448
Steve Block6ded16b2010-05-10 14:33:55 +01005449 Load(args->at(0)); // Size of array, smi.
5450 Load(args->at(1)); // "index" property value.
5451 Load(args->at(2)); // "input" property value.
Ben Murdochb0fe1622011-05-05 13:52:32 +01005452 RegExpConstructResultStub stub;
5453 frame_->SpillAll();
5454 frame_->CallStub(&stub, 3);
Steve Block6ded16b2010-05-10 14:33:55 +01005455 frame_->EmitPush(r0);
5456}
5457
5458
5459class DeferredSearchCache: public DeferredCode {
5460 public:
5461 DeferredSearchCache(Register dst, Register cache, Register key)
5462 : dst_(dst), cache_(cache), key_(key) {
5463 set_comment("[ DeferredSearchCache");
5464 }
5465
5466 virtual void Generate();
5467
5468 private:
5469 Register dst_, cache_, key_;
5470};
5471
5472
5473void DeferredSearchCache::Generate() {
5474 __ Push(cache_, key_);
5475 __ CallRuntime(Runtime::kGetFromCache, 2);
Iain Merrick75681382010-08-19 15:07:18 +01005476 __ Move(dst_, r0);
Steve Block6ded16b2010-05-10 14:33:55 +01005477}
5478
5479
5480void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
5481 ASSERT_EQ(2, args->length());
5482
5483 ASSERT_NE(NULL, args->at(0)->AsLiteral());
5484 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
5485
5486 Handle<FixedArray> jsfunction_result_caches(
Steve Block44f0eee2011-05-26 01:26:41 +01005487 Isolate::Current()->global_context()->jsfunction_result_caches());
Steve Block6ded16b2010-05-10 14:33:55 +01005488 if (jsfunction_result_caches->length() <= cache_id) {
5489 __ Abort("Attempt to use undefined cache.");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005490 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01005491 return;
5492 }
5493
5494 Load(args->at(1));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005495
Iain Merrick75681382010-08-19 15:07:18 +01005496 frame_->PopToR1();
5497 frame_->SpillAll();
5498 Register key = r1; // Just popped to r1
5499 Register result = r0; // Free, as frame has just been spilled.
5500 Register scratch1 = VirtualFrame::scratch0();
5501 Register scratch2 = VirtualFrame::scratch1();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005502
Iain Merrick75681382010-08-19 15:07:18 +01005503 __ ldr(scratch1, ContextOperand(cp, Context::GLOBAL_INDEX));
5504 __ ldr(scratch1,
5505 FieldMemOperand(scratch1, GlobalObject::kGlobalContextOffset));
5506 __ ldr(scratch1,
5507 ContextOperand(scratch1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
5508 __ ldr(scratch1,
5509 FieldMemOperand(scratch1, FixedArray::OffsetOfElementAt(cache_id)));
Steve Block6ded16b2010-05-10 14:33:55 +01005510
Iain Merrick75681382010-08-19 15:07:18 +01005511 DeferredSearchCache* deferred =
5512 new DeferredSearchCache(result, scratch1, key);
Steve Block6ded16b2010-05-10 14:33:55 +01005513
5514 const int kFingerOffset =
5515 FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
Kristian Monsen50ef84f2010-07-29 15:18:00 +01005516 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
Iain Merrick75681382010-08-19 15:07:18 +01005517 __ ldr(result, FieldMemOperand(scratch1, kFingerOffset));
5518 // result now holds finger offset as a smi.
5519 __ add(scratch2, scratch1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5520 // scratch2 now points to the start of fixed array elements.
5521 __ ldr(result,
5522 MemOperand(
5523 scratch2, result, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
5524 // Note side effect of PreIndex: scratch2 now points to the key of the pair.
5525 __ cmp(key, result);
Steve Block6ded16b2010-05-10 14:33:55 +01005526 deferred->Branch(ne);
5527
Iain Merrick75681382010-08-19 15:07:18 +01005528 __ ldr(result, MemOperand(scratch2, kPointerSize));
Steve Block6ded16b2010-05-10 14:33:55 +01005529
5530 deferred->BindExit();
Iain Merrick75681382010-08-19 15:07:18 +01005531 frame_->EmitPush(result);
Leon Clarkee46be812010-01-19 14:06:41 +00005532}
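// Layout assumed by the inline lookup above (a sketch of
// JSFunctionResultCache): a FixedArray whose finger element holds a smi
// index of the most recently used key; the matching value sits in the very
// next element, which is why a hit loads the result from
// MemOperand(scratch2, kPointerSize) after the key comparison.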
5533
5534
Andrei Popescu402d9372010-02-26 13:31:12 +00005535void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
5536 ASSERT_EQ(args->length(), 1);
5537
5538 // Load the argument on the stack and call the NumberToString stub.
5539 Load(args->at(0));
5540
Steve Block6ded16b2010-05-10 14:33:55 +01005541 NumberToStringStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005542 frame_->SpillAll();
Steve Block6ded16b2010-05-10 14:33:55 +01005543 frame_->CallStub(&stub, 1);
5544 frame_->EmitPush(r0);
5545}
5546
5547
5548class DeferredSwapElements: public DeferredCode {
5549 public:
5550 DeferredSwapElements(Register object, Register index1, Register index2)
5551 : object_(object), index1_(index1), index2_(index2) {
5552 set_comment("[ DeferredSwapElements");
5553 }
5554
5555 virtual void Generate();
5556
5557 private:
5558 Register object_, index1_, index2_;
5559};
5560
5561
5562void DeferredSwapElements::Generate() {
5563 __ push(object_);
5564 __ push(index1_);
5565 __ push(index2_);
5566 __ CallRuntime(Runtime::kSwapElements, 3);
5567}
5568
5569
5570void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
5571 Comment cmnt(masm_, "[ GenerateSwapElements");
5572
5573 ASSERT_EQ(3, args->length());
5574
5575 Load(args->at(0));
5576 Load(args->at(1));
5577 Load(args->at(2));
5578
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005579 VirtualFrame::SpilledScope spilled_scope(frame_);
5580
Steve Block6ded16b2010-05-10 14:33:55 +01005581 Register index2 = r2;
5582 Register index1 = r1;
5583 Register object = r0;
5584 Register tmp1 = r3;
5585 Register tmp2 = r4;
5586
5587 frame_->EmitPop(index2);
5588 frame_->EmitPop(index1);
5589 frame_->EmitPop(object);
5590
5591 DeferredSwapElements* deferred =
5592 new DeferredSwapElements(object, index1, index2);
5593
5594 // Fetch the map and check if array is in fast case.
5595 // Check that object doesn't require security checks and
5596 // has no indexed interceptor.
Steve Block44f0eee2011-05-26 01:26:41 +01005597 __ CompareObjectType(object, tmp1, tmp2, JS_ARRAY_TYPE);
5598 deferred->Branch(ne);
Steve Block6ded16b2010-05-10 14:33:55 +01005599 __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
5600 __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
Steve Block1e0659c2011-05-24 12:43:12 +01005601 deferred->Branch(ne);
Steve Block6ded16b2010-05-10 14:33:55 +01005602
Iain Merrick75681382010-08-19 15:07:18 +01005603 // Check that the object's elements are in the fast case and writable.
Steve Block6ded16b2010-05-10 14:33:55 +01005604 __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset));
5605 __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset));
5606 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
5607 __ cmp(tmp2, ip);
5608 deferred->Branch(ne);
5609
5610 // Smi-tagging is equivalent to multiplying by 2.
5611 STATIC_ASSERT(kSmiTag == 0);
5612 STATIC_ASSERT(kSmiTagSize == 1);
5613
5614 // Check that both indices are smis.
5615 __ mov(tmp2, index1);
5616 __ orr(tmp2, tmp2, index2);
5617 __ tst(tmp2, Operand(kSmiTagMask));
Steve Block1e0659c2011-05-24 12:43:12 +01005618 deferred->Branch(ne);
Steve Block6ded16b2010-05-10 14:33:55 +01005619
Ben Murdochdb5a90a2011-01-06 18:27:03 +00005620 // Check that both indices are valid.
5621 __ ldr(tmp2, FieldMemOperand(object, JSArray::kLengthOffset));
5622 __ cmp(tmp2, index1);
5623 __ cmp(tmp2, index2, hi);
5624 deferred->Branch(ls);
5625
Steve Block6ded16b2010-05-10 14:33:55 +01005626 // Bring the offsets into the fixed array in tmp1 into index1 and
5627 // index2.
5628 __ mov(tmp2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
5629 __ add(index1, tmp2, Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
5630 __ add(index2, tmp2, Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
5631
5632 // Swap elements.
5633 Register tmp3 = object;
5634 object = no_reg;
5635 __ ldr(tmp3, MemOperand(tmp1, index1));
5636 __ ldr(tmp2, MemOperand(tmp1, index2));
5637 __ str(tmp3, MemOperand(tmp1, index2));
5638 __ str(tmp2, MemOperand(tmp1, index1));
5639
5640 Label done;
5641 __ InNewSpace(tmp1, tmp2, eq, &done);
5642 // Possible optimization: do a check that both values are Smis
5643 // (OR them together and test against the smi mask).
5644
5645 __ mov(tmp2, tmp1);
Steve Block9fac8402011-05-12 15:51:54 +01005646 __ add(index1, index1, tmp1);
5647 __ add(index2, index2, tmp1);
5648 __ RecordWriteHelper(tmp1, index1, tmp3);
5649 __ RecordWriteHelper(tmp2, index2, tmp3);
Steve Block6ded16b2010-05-10 14:33:55 +01005650 __ bind(&done);
5651
5652 deferred->BindExit();
5653 __ LoadRoot(tmp1, Heap::kUndefinedValueRootIndex);
5654 frame_->EmitPush(tmp1);
5655}
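// The InNewSpace early exit above relies on the usual generational-GC
// invariant: stores into an object that itself lives in new space never need
// a remembered-set entry, so only the old-space case falls through to the
// two RecordWriteHelper calls.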
5656
5657
5658void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
5659 Comment cmnt(masm_, "[ GenerateCallFunction");
5660
5661 ASSERT(args->length() >= 2);
5662
5663 int n_args = args->length() - 2; // for receiver and function.
5664 Load(args->at(0)); // receiver
5665 for (int i = 0; i < n_args; i++) {
5666 Load(args->at(i + 1));
5667 }
5668 Load(args->at(n_args + 1)); // function
5669 frame_->CallJSFunction(n_args);
Andrei Popescu402d9372010-02-26 13:31:12 +00005670 frame_->EmitPush(r0);
5671}
5672
5673
5674void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
5675 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005676 Load(args->at(0));
Steve Block44f0eee2011-05-26 01:26:41 +01005677 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005678 TranscendentalCacheStub stub(TranscendentalCache::SIN,
5679 TranscendentalCacheStub::TAGGED);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005680 frame_->SpillAllButCopyTOSToR0();
5681 frame_->CallStub(&stub, 1);
5682 } else {
5683 frame_->CallRuntime(Runtime::kMath_sin, 1);
5684 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005685 frame_->EmitPush(r0);
5686}
5687
5688
5689void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
5690 ASSERT_EQ(args->length(), 1);
Andrei Popescu402d9372010-02-26 13:31:12 +00005691 Load(args->at(0));
Steve Block44f0eee2011-05-26 01:26:41 +01005692 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005693 TranscendentalCacheStub stub(TranscendentalCache::COS,
5694 TranscendentalCacheStub::TAGGED);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005695 frame_->SpillAllButCopyTOSToR0();
5696 frame_->CallStub(&stub, 1);
5697 } else {
5698 frame_->CallRuntime(Runtime::kMath_cos, 1);
5699 }
Andrei Popescu402d9372010-02-26 13:31:12 +00005700 frame_->EmitPush(r0);
5701}
5702
5703
Ben Murdochb0fe1622011-05-05 13:52:32 +01005704void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
5705 ASSERT_EQ(args->length(), 1);
5706 Load(args->at(0));
Steve Block44f0eee2011-05-26 01:26:41 +01005707 if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005708 TranscendentalCacheStub stub(TranscendentalCache::LOG,
5709 TranscendentalCacheStub::TAGGED);
Ben Murdochb0fe1622011-05-05 13:52:32 +01005710 frame_->SpillAllButCopyTOSToR0();
5711 frame_->CallStub(&stub, 1);
5712 } else {
5713 frame_->CallRuntime(Runtime::kMath_log, 1);
5714 }
5715 frame_->EmitPush(r0);
5716}
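// The same shape is used for sin, cos and log above: with VFP3 available the
// call goes through TranscendentalCacheStub (TAGGED), which presumably
// consults a cache of recent (input, result) pairs before computing, and
// otherwise the generic C++ runtime function is called.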
5717
5718
Steve Blocka7e24c12009-10-30 11:49:00 +00005719void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00005720 ASSERT(args->length() == 2);
5721
5722 // Load the two objects into registers and perform the comparison.
Leon Clarkef7060e22010-06-03 12:02:55 +01005723 Load(args->at(0));
5724 Load(args->at(1));
5725 Register lhs = frame_->PopToRegister();
5726 Register rhs = frame_->PopToRegister(lhs);
5727 __ cmp(lhs, rhs);
Steve Blocka7e24c12009-10-30 11:49:00 +00005728 cc_reg_ = eq;
5729}
5730
5731
Ben Murdochbb769b22010-08-11 14:56:33 +01005732void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
5733 ASSERT(args->length() == 2);
5734
5735 // Load the two objects into registers and perform the comparison.
5736 Load(args->at(0));
5737 Load(args->at(1));
5738 Register right = frame_->PopToRegister();
5739 Register left = frame_->PopToRegister(right);
5740 Register tmp = frame_->scratch0();
5741 Register tmp2 = frame_->scratch1();
5742
5743 // Jumps to done must have the eq flag set if the test is successful
5744 // and clear if the test has failed.
5745 Label done;
5746
5747 // Fail if either is a non-HeapObject.
5748 __ cmp(left, Operand(right));
5749 __ b(eq, &done);
5750 __ and_(tmp, left, Operand(right));
5751 __ eor(tmp, tmp, Operand(kSmiTagMask));
5752 __ tst(tmp, Operand(kSmiTagMask));
5753 __ b(ne, &done);
5754 __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
5755 __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
5756 __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
5757 __ b(ne, &done);
5758 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
5759 __ cmp(tmp, Operand(tmp2));
5760 __ b(ne, &done);
5761 __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
5762 __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
5763 __ cmp(tmp, tmp2);
5764 __ bind(&done);
5765 cc_reg_ = eq;
5766}
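// In other words, the checks above treat two values as equivalent regexps
// iff they are the same object, or both are heap objects with the same map
// of instance type JS_REGEXP_TYPE and the same data array; any smi operand
// fails the combined smi test and leaves 'ne' in the flags.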
5767
5768
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005769void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
5770 ASSERT(args->length() == 1);
5771 Load(args->at(0));
5772 Register value = frame_->PopToRegister();
5773 Register tmp = frame_->scratch0();
5774 __ ldr(tmp, FieldMemOperand(value, String::kHashFieldOffset));
5775 __ tst(tmp, Operand(String::kContainsCachedArrayIndexMask));
5776 cc_reg_ = eq;
5777}
5778
5779
5780void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
5781 ASSERT(args->length() == 1);
5782 Load(args->at(0));
5783 Register value = frame_->PopToRegister();
5784
5785 __ ldr(value, FieldMemOperand(value, String::kHashFieldOffset));
5786 __ IndexFromHash(value, value);
5787 frame_->EmitPush(value);
5788}
5789
Ben Murdochbb769b22010-08-11 14:56:33 +01005790
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005791void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
5792 ASSERT(args->length() == 2);
5793 Load(args->at(0));
5794 Register value = frame_->PopToRegister();
5795 __ LoadRoot(value, Heap::kUndefinedValueRootIndex);
5796 frame_->EmitPush(value);
5797}
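// Note that only the first argument is loaded (and immediately clobbered)
// above: this inline version simply answers undefined, which presumably
// makes the JavaScript caller fall back to the generic Array.prototype.join
// implementation instead of a fast ASCII path.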
5798
5799
Steve Blocka7e24c12009-10-30 11:49:00 +00005800void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
5801#ifdef DEBUG
5802 int original_height = frame_->height();
5803#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005804 if (CheckForInlineRuntimeCall(node)) {
5805 ASSERT((has_cc() && frame_->height() == original_height) ||
5806 (!has_cc() && frame_->height() == original_height + 1));
5807 return;
5808 }
5809
5810 ZoneList<Expression*>* args = node->arguments();
5811 Comment cmnt(masm_, "[ CallRuntime");
Steve Block44f0eee2011-05-26 01:26:41 +01005812 const Runtime::Function* function = node->function();
Steve Blocka7e24c12009-10-30 11:49:00 +00005813
5814 if (function == NULL) {
5815 // Prepare stack for calling JS runtime function.
Steve Blocka7e24c12009-10-30 11:49:00 +00005816 // Push the builtins object found in the current global object.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005817 Register scratch = VirtualFrame::scratch0();
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005818 __ ldr(scratch, GlobalObjectOperand());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005819 Register builtins = frame_->GetTOSRegister();
5820 __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
5821 frame_->EmitPush(builtins);
Steve Blocka7e24c12009-10-30 11:49:00 +00005822 }
5823
5824 // Push the arguments ("left-to-right").
5825 int arg_count = args->length();
5826 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005827 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00005828 }
5829
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005830 VirtualFrame::SpilledScope spilled_scope(frame_);
5831
Steve Blocka7e24c12009-10-30 11:49:00 +00005832 if (function == NULL) {
5833 // Call the JS runtime function.
Andrei Popescu402d9372010-02-26 13:31:12 +00005834 __ mov(r2, Operand(node->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00005835 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Steve Block44f0eee2011-05-26 01:26:41 +01005836 Handle<Code> stub =
5837 ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
Steve Blocka7e24c12009-10-30 11:49:00 +00005838 frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
5839 __ ldr(cp, frame_->Context());
Steve Blocka7e24c12009-10-30 11:49:00 +00005840 frame_->EmitPush(r0);
5841 } else {
5842 // Call the C runtime function.
5843 frame_->CallRuntime(function, arg_count);
5844 frame_->EmitPush(r0);
5845 }
Steve Block6ded16b2010-05-10 14:33:55 +01005846 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00005847}
5848
5849
5850void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
5851#ifdef DEBUG
5852 int original_height = frame_->height();
5853#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00005854 Comment cmnt(masm_, "[ UnaryOperation");
5855
5856 Token::Value op = node->op();
5857
5858 if (op == Token::NOT) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005859 LoadCondition(node->expression(), false_target(), true_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00005860 // LoadCondition may (and usually does) leave a test and branch to
5861 // be emitted by the caller. In that case, negate the condition.
5862 if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);
5863
5864 } else if (op == Token::DELETE) {
5865 Property* property = node->expression()->AsProperty();
5866 Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
5867 if (property != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005868 Load(property->obj());
5869 Load(property->key());
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005870 frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
5871 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 3);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005872 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005873
5874 } else if (variable != NULL) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005875 // Delete of an unqualified identifier is disallowed in strict mode
5876 // but "delete this" is allowed.
5877 ASSERT(strict_mode_flag() == kNonStrictMode || variable->is_this());
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005878 Slot* slot = variable->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00005879 if (variable->is_global()) {
5880 LoadGlobal();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005881 frame_->EmitPush(Operand(variable->name()));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01005882 frame_->EmitPush(Operand(Smi::FromInt(kNonStrictMode)));
5883 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 3);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005884 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005885
5886 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
Steve Block1e0659c2011-05-24 12:43:12 +01005887 // Delete from the context holding the named variable.
Steve Blocka7e24c12009-10-30 11:49:00 +00005888 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005889 frame_->EmitPush(Operand(variable->name()));
Steve Block1e0659c2011-05-24 12:43:12 +01005890 frame_->CallRuntime(Runtime::kDeleteContextSlot, 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005891 frame_->EmitPush(r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00005892
5893 } else {
5894 // Default: Result of deleting non-global, not dynamically
5895 // introduced variables is false.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005896 frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005897 }
5898
5899 } else {
5900 // Default: Result of deleting expressions is true.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005901 Load(node->expression()); // may have side-effects
Steve Blocka7e24c12009-10-30 11:49:00 +00005902 frame_->Drop();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005903 frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005904 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005905
5906 } else if (op == Token::TYPEOF) {
5907 // Special case for loading the typeof expression; see comment on
5908 // LoadTypeofExpression().
5909 LoadTypeofExpression(node->expression());
5910 frame_->CallRuntime(Runtime::kTypeof, 1);
5911 frame_->EmitPush(r0); // r0 has result
5912
5913 } else {
Kristian Monsen80d68ea2010-09-08 11:05:35 +01005914 bool can_overwrite = node->expression()->ResultOverwriteAllowed();
Leon Clarkeac952652010-07-15 11:15:24 +01005915 UnaryOverwriteMode overwrite =
5916 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
5917
5918 bool no_negative_zero = node->expression()->no_negative_zero();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005919 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00005920 switch (op) {
5921 case Token::NOT:
5922 case Token::DELETE:
5923 case Token::TYPEOF:
5924 UNREACHABLE(); // handled above
5925 break;
5926
5927 case Token::SUB: {
Steve Block8defd9f2010-07-08 12:39:36 +01005928 frame_->PopToR0();
Leon Clarkeac952652010-07-15 11:15:24 +01005929 GenericUnaryOpStub stub(
5930 Token::SUB,
5931 overwrite,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005932 NO_UNARY_FLAGS,
Leon Clarkeac952652010-07-15 11:15:24 +01005933 no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
Steve Blocka7e24c12009-10-30 11:49:00 +00005934 frame_->CallStub(&stub, 0);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005935 frame_->EmitPush(r0); // r0 has result
Steve Blocka7e24c12009-10-30 11:49:00 +00005936 break;
5937 }
5938
5939 case Token::BIT_NOT: {
Steve Block8defd9f2010-07-08 12:39:36 +01005940 Register tos = frame_->PopToRegister();
5941 JumpTarget not_smi_label;
Steve Blocka7e24c12009-10-30 11:49:00 +00005942 JumpTarget continue_label;
Steve Block8defd9f2010-07-08 12:39:36 +01005943 // Smi check.
5944 __ tst(tos, Operand(kSmiTagMask));
5945 not_smi_label.Branch(ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00005946
Steve Block8defd9f2010-07-08 12:39:36 +01005947 __ mvn(tos, Operand(tos));
5948 __ bic(tos, tos, Operand(kSmiTagMask)); // Bit-clear inverted smi-tag.
5949 frame_->EmitPush(tos);
5950 // The fast case is the first to jump to the continue label, so it gets
5951 // to decide the virtual frame layout.
Steve Blocka7e24c12009-10-30 11:49:00 +00005952 continue_label.Jump();
Leon Clarke4515c472010-02-03 11:58:03 +00005953
Steve Block8defd9f2010-07-08 12:39:36 +01005954 not_smi_label.Bind();
5955 frame_->SpillAll();
5956 __ Move(r0, tos);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01005957 GenericUnaryOpStub stub(Token::BIT_NOT,
5958 overwrite,
5959 NO_UNARY_SMI_CODE_IN_STUB);
Steve Block8defd9f2010-07-08 12:39:36 +01005960 frame_->CallStub(&stub, 0);
5961 frame_->EmitPush(r0);
5962
Steve Blocka7e24c12009-10-30 11:49:00 +00005963 continue_label.Bind();
5964 break;
5965 }
5966
5967 case Token::VOID:
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01005968 frame_->Drop();
5969 frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00005970 break;
5971
5972 case Token::ADD: {
Steve Block8defd9f2010-07-08 12:39:36 +01005973 Register tos = frame_->Peek();
Steve Blocka7e24c12009-10-30 11:49:00 +00005974 // Smi check.
5975 JumpTarget continue_label;
Steve Block8defd9f2010-07-08 12:39:36 +01005976 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00005977 continue_label.Branch(eq);
Steve Block8defd9f2010-07-08 12:39:36 +01005978
Steve Blockd0582a62009-12-15 09:54:21 +00005979 frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
Steve Block8defd9f2010-07-08 12:39:36 +01005980 frame_->EmitPush(r0);
5981
Steve Blocka7e24c12009-10-30 11:49:00 +00005982 continue_label.Bind();
5983 break;
5984 }
5985 default:
5986 UNREACHABLE();
5987 }
Steve Blocka7e24c12009-10-30 11:49:00 +00005988 }
5989 ASSERT(!has_valid_frame() ||
5990 (has_cc() && frame_->height() == original_height) ||
5991 (!has_cc() && frame_->height() == original_height + 1));
5992}
5993
5994
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08005995class DeferredCountOperation: public DeferredCode {
5996 public:
5997 DeferredCountOperation(Register value,
5998 bool is_increment,
5999 bool is_postfix,
6000 int target_size)
6001 : value_(value),
6002 is_increment_(is_increment),
6003 is_postfix_(is_postfix),
6004 target_size_(target_size) {}
6005
6006 virtual void Generate() {
6007 VirtualFrame copied_frame(*frame_state()->frame());
6008
6009 Label slow;
6010 // Check for smi operand.
6011 __ tst(value_, Operand(kSmiTagMask));
6012 __ b(ne, &slow);
6013
6014 // Revert optimistic increment/decrement.
6015 if (is_increment_) {
6016 __ sub(value_, value_, Operand(Smi::FromInt(1)));
6017 } else {
6018 __ add(value_, value_, Operand(Smi::FromInt(1)));
6019 }
6020
6021 // Slow case: Convert to number. At this point the
6022 // value to be incremented is in the value register.
6023 __ bind(&slow);
6024
6025 // Convert the operand to a number.
6026 copied_frame.EmitPush(value_);
6027
6028 copied_frame.InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
6029
6030 if (is_postfix_) {
6031 // Postfix: store to result (on the stack).
6032 __ str(r0, MemOperand(sp, target_size_ * kPointerSize));
6033 }
6034
6035 copied_frame.EmitPush(r0);
6036 copied_frame.EmitPush(Operand(Smi::FromInt(1)));
6037
6038 if (is_increment_) {
6039 copied_frame.CallRuntime(Runtime::kNumberAdd, 2);
6040 } else {
6041 copied_frame.CallRuntime(Runtime::kNumberSub, 2);
6042 }
6043
6044 __ Move(value_, r0);
6045
6046 copied_frame.MergeTo(frame_state()->frame());
6047 }
6048
6049 private:
6050 Register value_;
6051 bool is_increment_;
6052 bool is_postfix_;
6053 int target_size_;
6054};
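// Ordering note for the deferred path above: an operand that was never a smi
// branches straight to 'slow' and skips the revert, while a smi whose
// optimistic add/sub overflowed (the 'vs' entry in VisitCountOperation
// below) still passes the smi test here, so it is reverted first and
// TO_NUMBER sees the original value.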
6055
6056
Steve Blocka7e24c12009-10-30 11:49:00 +00006057void CodeGenerator::VisitCountOperation(CountOperation* node) {
6058#ifdef DEBUG
6059 int original_height = frame_->height();
6060#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00006061 Comment cmnt(masm_, "[ CountOperation");
Steve Block8defd9f2010-07-08 12:39:36 +01006062 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00006063
6064 bool is_postfix = node->is_postfix();
6065 bool is_increment = node->op() == Token::INC;
6066
6067 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
6068 bool is_const = (var != NULL && var->mode() == Variable::CONST);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006069 bool is_slot = (var != NULL && var->mode() == Variable::VAR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006070
Kristian Monsen0d5e1162010-09-30 15:31:59 +01006071 if (!is_const && is_slot && type_info(var->AsSlot()).IsSmi()) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006072 // The type info declares that this variable is always a Smi. That
6073 // means it is a Smi both before and after the increment/decrement.
6074 // Let's make use of that to generate very minimal count code.
6075 Reference target(this, node->expression(), !is_const);
6076 ASSERT(!target.is_illegal());
6077 target.GetValue(); // Pushes the value.
6078 Register value = frame_->PopToRegister();
6079 if (is_postfix) frame_->EmitPush(value);
6080 if (is_increment) {
6081 __ add(value, value, Operand(Smi::FromInt(1)));
6082 } else {
6083 __ sub(value, value, Operand(Smi::FromInt(1)));
6084 }
6085 frame_->EmitPush(value);
Steve Block8defd9f2010-07-08 12:39:36 +01006086 target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006087 if (is_postfix) frame_->Pop();
6088 ASSERT_EQ(original_height + 1, frame_->height());
6089 return;
6090 }
6091
6092 // If it's a postfix expression and its result is not ignored and the
6093 // reference is non-trivial, then push a placeholder on the stack now
6094 // to hold the result of the expression.
6095 bool placeholder_pushed = false;
6096 if (!is_slot && is_postfix) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006097 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006098 placeholder_pushed = true;
Steve Blocka7e24c12009-10-30 11:49:00 +00006099 }
6100
Leon Clarked91b9f72010-01-27 17:25:45 +00006101 // A constant reference is not saved to, so a constant reference is not a
6102 // compound assignment reference.
6103 { Reference target(this, node->expression(), !is_const);
Steve Blocka7e24c12009-10-30 11:49:00 +00006104 if (target.is_illegal()) {
6105 // Spoof the virtual frame to have the expected height (one higher
6106 // than on entry).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006107 if (!placeholder_pushed) frame_->EmitPush(Operand(Smi::FromInt(0)));
Steve Block6ded16b2010-05-10 14:33:55 +01006108 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006109 return;
6110 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006111
Kristian Monsen25f61362010-05-21 11:50:48 +01006112 // This pushes 0, 1 or 2 words on the stack to be used later when updating
6113 // the target. It also pushes the current value of the target.
Steve Block6ded16b2010-05-10 14:33:55 +01006114 target.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00006115
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006116 bool value_is_known_smi = frame_->KnownSmiAt(0);
Kristian Monsen25f61362010-05-21 11:50:48 +01006117 Register value = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00006118
6119 // Postfix: Store the old value as the result.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006120 if (placeholder_pushed) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006121 frame_->SetElementAt(value, target.size());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006122 } else if (is_postfix) {
6123 frame_->EmitPush(value);
6124 __ mov(VirtualFrame::scratch0(), value);
6125 value = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00006126 }
6127
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006128 // We can't use any type information here, since the virtual frame from the
6129 // deferred code may have lost information. We cannot merge a virtual frame
6130 // with less specific type knowledge into one whose more specific knowledge
6131 // has already been used to generate code.
6133 frame_->ForgetTypeInfo();
6134
6135 // The constructor here will capture the current virtual frame and use it to
6136 // merge to after the deferred code has run. No virtual frame changes are
6137 // allowed from here until the 'BindExit' below.
6138 DeferredCode* deferred =
6139 new DeferredCountOperation(value,
6140 is_increment,
6141 is_postfix,
6142 target.size());
6143 if (!value_is_known_smi) {
6144 // Check for smi operand.
6145 __ tst(value, Operand(kSmiTagMask));
6146
6147 deferred->Branch(ne);
6148 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006149
Steve Blocka7e24c12009-10-30 11:49:00 +00006150 // Perform optimistic increment/decrement.
6151 if (is_increment) {
Kristian Monsen25f61362010-05-21 11:50:48 +01006152 __ add(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006153 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01006154 __ sub(value, value, Operand(Smi::FromInt(1)), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00006155 }
6156
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006157 // If increment/decrement overflows, go to deferred code.
6158 deferred->Branch(vs);
Steve Blocka7e24c12009-10-30 11:49:00 +00006159
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006160 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00006161
Steve Blocka7e24c12009-10-30 11:49:00 +00006162 // Store the new value in the target if not const.
Kristian Monsen25f61362010-05-21 11:50:48 +01006163 // At this point the answer is in the value register.
Kristian Monsen25f61362010-05-21 11:50:48 +01006164 frame_->EmitPush(value);
6165 // Set the target with the result, leaving the result on
6166 // top of the stack. Removes the target from the stack if
6167 // it has a non-zero size.
Steve Block8defd9f2010-07-08 12:39:36 +01006168 if (!is_const) target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Steve Blocka7e24c12009-10-30 11:49:00 +00006169 }
6170
6171 // Postfix: Discard the new value and use the old.
Kristian Monsen25f61362010-05-21 11:50:48 +01006172 if (is_postfix) frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01006173 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006174}
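// Example of the deferred path being taken (a sketch, assuming 31-bit smis):
// for x == 1073741823 (the largest smi), x++ overflows the optimistic
// 'add ... SetCC' above, the 'vs' branch fires, and DeferredCountOperation
// finishes the operation through Runtime::kNumberAdd.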
6175
6176
Steve Block6ded16b2010-05-10 14:33:55 +01006177void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006178 // According to ECMA-262 section 11.11, page 58, the binary logical
6179 // operators must yield the result of one of the two expressions
6180 // before any ToBoolean() conversions. This means that the value
6181 // produced by a && or || operator is not necessarily a boolean.

  // NOTE: If the left hand side produces a materialized value (not in
  // the CC register), we force the right hand side to do the
  // same. This is necessary because we may have to branch to the exit
  // after evaluating the left hand side (due to the shortcut
  // semantics), but the compiler must (statically) know if the result
  // of compiling the binary operation is materialized or not.
  if (node->op() == Token::AND) {
    JumpTarget is_true;
    LoadCondition(node->left(), &is_true, false_target(), false);
    if (has_valid_frame() && !has_cc()) {
      // The left-hand side result is on top of the virtual frame.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      frame_->Dup();
      // Avoid popping the result if it converts to 'false' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      ToBoolean(&pop_and_continue, &exit);
      Branch(false, &exit);

      // Pop the result of evaluating the first part.
      pop_and_continue.Bind();
      frame_->Pop();

      // Evaluate right side expression.
      is_true.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    } else if (has_cc() || is_true.is_linked()) {
      // The left-hand side is either (a) partially compiled to
      // control flow with a final branch left to emit or (b) fully
      // compiled to control flow and possibly true.
      if (has_cc()) {
        Branch(false, false_target());
      }
      is_true.Bind();
      LoadCondition(node->right(), true_target(), false_target(), false);
    } else {
      // Nothing to do.
      ASSERT(!has_valid_frame() && !has_cc() && !is_true.is_linked());
    }

  } else {
    ASSERT(node->op() == Token::OR);
    JumpTarget is_false;
    LoadCondition(node->left(), true_target(), &is_false, false);
    if (has_valid_frame() && !has_cc()) {
      // The left-hand side result is on top of the virtual frame.
      JumpTarget pop_and_continue;
      JumpTarget exit;

      frame_->Dup();
      // Avoid popping the result if it converts to 'true' using the
      // standard ToBoolean() conversion as described in ECMA-262,
      // section 9.2, page 30.
      ToBoolean(&exit, &pop_and_continue);
      Branch(true, &exit);

      // Pop the result of evaluating the first part.
      pop_and_continue.Bind();
      frame_->Pop();

      // Evaluate right side expression.
      is_false.Bind();
      Load(node->right());

      // Exit (always with a materialized value).
      exit.Bind();
    } else if (has_cc() || is_false.is_linked()) {
      // The left-hand side is either (a) partially compiled to
      // control flow with a final branch left to emit or (b) fully
      // compiled to control flow and possibly false.
      if (has_cc()) {
        Branch(true, true_target());
      }
      is_false.Bind();
      LoadCondition(node->right(), true_target(), false_target(), false);
    } else {
      // Nothing to do.
      ASSERT(!has_valid_frame() && !has_cc() && !is_false.is_linked());
    }
  }
}


void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ BinaryOperation");

  if (node->op() == Token::AND || node->op() == Token::OR) {
    GenerateLogicalBooleanOperation(node);
  } else {
    // Optimize for the case where (at least) one of the expressions
    // is a literal small integer.
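    // (e.g. in (x + 1) the right operand is a smi literal, so the code
    // below can emit an inlined smi operation with a bailout instead of
    // going through the generic binary op stub.)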
    Literal* lliteral = node->left()->AsLiteral();
    Literal* rliteral = node->right()->AsLiteral();
    // NOTE: The code below assumes that the slow cases (calls to runtime)
    // never return a constant/immutable object.
    bool overwrite_left = node->left()->ResultOverwriteAllowed();
    bool overwrite_right = node->right()->ResultOverwriteAllowed();
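    // An overwritable operand is a temporary, so the generated code may
    // reuse its heap number to hold the result instead of allocating a
    // fresh one; the NOTE above is what makes that reuse safe.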

    if (rliteral != NULL && rliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->left());
      if (frame_->KnownSmiAt(0)) overwrite_left = false;
      SmiOperation(node->op(),
                   rliteral->handle(),
                   false,
                   overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE);
    } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->right());
      if (frame_->KnownSmiAt(0)) overwrite_right = false;
      SmiOperation(node->op(),
                   lliteral->handle(),
                   true,
                   overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (lliteral != NULL) {
        ASSERT(!lliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      if (rliteral != NULL) {
        ASSERT(!rliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      VirtualFrame::RegisterAllocationScope scope(this);
      OverwriteMode overwrite_mode = NO_OVERWRITE;
      if (overwrite_left) {
        overwrite_mode = OVERWRITE_LEFT;
      } else if (overwrite_right) {
        overwrite_mode = OVERWRITE_RIGHT;
      }
      Load(node->left());
      Load(node->right());
      GenericBinaryOperation(node->op(), overwrite_mode, inline_smi);
    }
  }
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  frame_->EmitPush(MemOperand(frame_->Function()));
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CompareOperation");

  VirtualFrame::RegisterAllocationScope nonspilled_scope(this);

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();

  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
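  // For example, (typeof x == 'number') compiles into a direct smi check
  // plus a heap number map check below, without ever materializing the
  // string that the typeof operator would normally produce.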
6359 UnaryOperation* operation = left->AsUnaryOperation();
6360 if ((op == Token::EQ || op == Token::EQ_STRICT) &&
6361 (operation != NULL && operation->op() == Token::TYPEOF) &&
6362 (right->AsLiteral() != NULL &&
6363 right->AsLiteral()->handle()->IsString())) {
6364 Handle<String> check(String::cast(*right->AsLiteral()->handle()));
6365
    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Register tos = frame_->PopToRegister();

    Register scratch = VirtualFrame::scratch0();

    if (check->Equals(HEAP->number_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      true_target()->Branch(eq);
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(HEAP->string_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));

      // It can be an undetectable string object.
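      // (Undetectable objects, e.g. document.all in a browser embedding,
      // masquerade as undefined for typeof, so they must not be classified
      // as strings here.)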
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
      __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
      cc_reg_ = lt;

    } else if (check->Equals(HEAP->boolean_symbol())) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(HEAP->undefined_symbol())) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));

      cc_reg_ = eq;

    } else if (check->Equals(HEAP->function_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);
      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
      true_target()->Branch(eq);
      // Regular expressions are callable so typeof == 'function'.
      __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
      cc_reg_ = eq;

    } else if (check->Equals(HEAP->object_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ LoadRoot(ip, Heap::kNullValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldrb(tos, FieldMemOperand(map_reg, Map::kBitFieldOffset));
      __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
      __ cmp(tos, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(tos, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
      __ cmp(tos, Operand(FIRST_JS_OBJECT_TYPE));
      false_target()->Branch(lt);
      __ cmp(tos, Operand(LAST_JS_OBJECT_TYPE));
      cc_reg_ = le;

    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
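      // (e.g. typeof x == 'banana', which is statically known to be false.)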
      false_target()->Jump();
    }
    ASSERT(!has_valid_frame() ||
           (has_cc() && frame_->height() == original_height));
    return;
  }

  switch (op) {
    case Token::EQ:
      Comparison(eq, left, right, false);
      break;

    case Token::LT:
      Comparison(lt, left, right);
      break;

    case Token::GT:
      Comparison(gt, left, right);
      break;

    case Token::LTE:
      Comparison(le, left, right);
      break;

    case Token::GTE:
      Comparison(ge, left, right);
      break;

    case Token::EQ_STRICT:
      Comparison(eq, left, right, true);
      break;

    case Token::IN: {
      Load(left);
      Load(right);
      frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
      frame_->EmitPush(r0);
      break;
    }

    case Token::INSTANCEOF: {
      Load(left);
      Load(right);
      InstanceofStub stub(InstanceofStub::kNoFlags);
      frame_->CallStub(&stub, 2);
      // At this point if instanceof succeeded then r0 == 0.
      __ tst(r0, Operand(r0));
      cc_reg_ = eq;
      break;
    }

    default:
      UNREACHABLE();
  }
  ASSERT((has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Register tos = frame_->PopToRegister();
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(tos, ip);

  // The 'null' value is only equal to 'undefined' if using non-strict
  // comparisons.
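  // (i.e. (x == null) also holds for undefined and for undetectable
  // objects, while (x === null) holds for null itself only.)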
  if (!node->is_strict()) {
    true_target()->Branch(eq);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, Operand(ip));
    true_target()->Branch(eq);

    __ tst(tos, Operand(kSmiTagMask));
    false_target()->Branch(eq);

    // It can be an undetectable object.
    __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
    __ ldrb(tos, FieldMemOperand(tos, Map::kBitFieldOffset));
    __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
    __ cmp(tos, Operand(1 << Map::kIsUndetectable));
  }

  cc_reg_ = eq;
  ASSERT(has_cc() && frame_->height() == original_height);
}


class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetNamedValue(Register receiver,
                                          Handle<String> name,
                                          bool is_contextual)
      : receiver_(receiver),
        name_(name),
        is_contextual_(is_contextual),
        is_dont_delete_(false) {
    set_comment(is_contextual
                ? "[ DeferredReferenceGetNamedValue (contextual)"
                : "[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  void set_is_dont_delete(bool value) {
    ASSERT(is_contextual_);
    is_dont_delete_ = value;
  }

 private:
  Register receiver_;
  Handle<String> name_;
  bool is_contextual_;
  bool is_dont_delete_;
};


// The convention is that on entry the receiver is in a register that
// is not used by the stack. On exit the answer is found in that same
// register and the stack has the same height.
void DeferredReferenceGetNamedValue::Generate() {
#ifdef DEBUG
  int expected_height = frame_state()->frame()->height();
#endif
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
  __ DecrementCounter(masm_->isolate()->counters()->named_load_inline(),
                      1, scratch1, scratch2);
  __ IncrementCounter(masm_->isolate()->counters()->named_load_inline_miss(),
                      1, scratch1, scratch2);

  // Ensure receiver in r0 and name in r2 to match load ic calling convention.
  __ Move(r0, receiver_);
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
        Builtins::kLoadIC_Initialize));
    RelocInfo::Mode mode = is_contextual_
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    __ Call(ic, mode);
    // We must mark the code just after the call with the correct marker.
    MacroAssembler::NopMarkerTypes code_marker;
    if (is_contextual_) {
      code_marker = is_dont_delete_
                    ? MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE
                    : MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT;
    } else {
      code_marker = MacroAssembler::PROPERTY_ACCESS_INLINED;
    }
    __ MarkCode(code_marker);

    // At this point the answer is in r0. We move it to the expected register
    // if necessary.
    __ Move(receiver_, r0);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver register since that register was not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
  ASSERT_EQ(expected_height, frame_state()->frame()->height());
}


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceGetKeyedValue(Register key, Register receiver)
      : key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

 private:
  Register key_;
  Register receiver_;
};


// Takes the key and receiver in r0 and r1 or vice versa. Returns the
// result in r0.
void DeferredReferenceGetKeyedValue::Generate() {
  ASSERT((key_.is(r0) && receiver_.is(r1)) ||
         (key_.is(r1) && receiver_.is(r0)));

  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(masm_->isolate()->counters()->keyed_load_inline(),
                      1, scratch1, scratch2);
  __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline_miss(),
                      1, scratch1, scratch2);

  // Ensure key in r0 and receiver in r1 to match keyed load ic calling
  // convention.
  if (key_.is(r1)) {
    __ Swap(r0, r1, ip);
  }

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
        Builtins::kKeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed load has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver or key registers since they were not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
                                 StrictModeFlag strict_mode)
      : value_(value),
        key_(key),
        receiver_(receiver),
        strict_mode_(strict_mode) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

 private:
  Register value_;
  Register key_;
  Register receiver_;
  StrictModeFlag strict_mode_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(masm_->isolate()->counters()->keyed_store_inline(),
                      1, scratch1, scratch2);
  __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline_miss(),
                      1, scratch1, scratch2);

  // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
  // calling convention.
  if (value_.is(r1)) {
    __ Swap(r0, r1, ip);
  }
  ASSERT(receiver_.is(r2));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed store IC. It has the arguments value, key and receiver in r0,
    // r1 and r2.
    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
        (strict_mode_ == kStrictMode)
            ? Builtins::kKeyedStoreIC_Initialize_Strict
            : Builtins::kKeyedStoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed store has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


class DeferredReferenceSetNamedValue: public DeferredCode {
 public:
  DeferredReferenceSetNamedValue(Register value,
                                 Register receiver,
                                 Handle<String> name,
                                 StrictModeFlag strict_mode)
      : value_(value),
        receiver_(receiver),
        name_(name),
        strict_mode_(strict_mode) {
    set_comment("[ DeferredReferenceSetNamedValue");
  }

  virtual void Generate();

 private:
  Register value_;
  Register receiver_;
  Handle<String> name_;
  StrictModeFlag strict_mode_;
};


// Takes value in r0, receiver in r1 and returns the result (the
// value) in r0.
void DeferredReferenceSetNamedValue::Generate() {
  // Record the entry frame and spill.
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  // Ensure value in r0, receiver in r1 to match store ic calling
  // convention.
  ASSERT(value_.is(r0) && receiver_.is(r1));
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call the store IC. It has the arguments value, receiver and name in
    // r0, r1 and r2.
    Handle<Code> ic(Isolate::Current()->builtins()->builtin(
        (strict_mode_ == kStrictMode) ? Builtins::kStoreIC_Initialize_Strict
                                      : Builtins::kStoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // named store has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Go back to the frame we entered with. The instructions
    // generated by this merge are skipped over by the inline store
    // patching mechanism when looking for the branch instruction that
    // tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


// Consumes the top of stack (the receiver) and pushes the result instead.
void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
  bool contextual_load_in_builtin =
      is_contextual &&
      (ISOLATE->bootstrapper()->IsActive() ||
       (!info_->closure().is_null() && info_->closure()->IsBuiltin()));

  if (scope()->is_global_scope() ||
      loop_nesting() == 0 ||
      contextual_load_in_builtin) {
    Comment cmnt(masm(), "[ Load from named Property");
    // Set up the name register and call the load IC.
    frame_->CallLoadIC(name,
                       is_contextual
                           ? RelocInfo::CODE_TARGET_CONTEXT
                           : RelocInfo::CODE_TARGET);
    frame_->EmitPush(r0);  // Push answer.
  } else {
    // Inline the in-object property case.
    Comment cmnt(masm(), is_contextual
                             ? "[ Inlined contextual property load"
                             : "[ Inlined named property load");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    if (is_contextual) {
      __ IncrementCounter(
          masm_->isolate()->counters()->named_load_global_inline(),
          1, frame_->scratch0(), frame_->scratch1());
    } else {
      __ IncrementCounter(masm_->isolate()->counters()->named_load_inline(),
                          1, frame_->scratch0(), frame_->scratch1());
    }

    // The following instructions are the inlined load of an in-object
    // property. Parts of this code are patched, so the exact instructions
    // generated need to be fixed. Therefore the constant pool is blocked
    // while generating this code.
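    // (Roughly: once the load IC has seen a suitable receiver it rewrites
    // the null map placeholder below with the receiver's actual map and the
    // dummy offset with the property's real offset, turning this sequence
    // into a monomorphic fast-path load; see LoadIC::PatchInlinedLoad in
    // ic-arm.cc.)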

    // Load the receiver from the stack.
    Register receiver = frame_->PopToRegister();

    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(receiver, name, is_contextual);

    bool is_dont_delete = false;
    if (is_contextual) {
      if (!info_->closure().is_null()) {
        // When doing lazy compilation we can check if the global cell
        // already exists and use its "don't delete" status as a hint.
        AssertNoAllocation no_gc;
        v8::internal::GlobalObject* global_object =
            info_->closure()->context()->global();
        LookupResult lookup;
        global_object->LocalLookupRealNamedProperty(*name, &lookup);
        if (lookup.IsProperty() && lookup.type() == NORMAL) {
          ASSERT(lookup.holder() == global_object);
          ASSERT(global_object->property_dictionary()->ValueAt(
              lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
          is_dont_delete = lookup.IsDontDelete();
        }
      }
      if (is_dont_delete) {
        __ IncrementCounter(
            masm_->isolate()->counters()->dont_delete_hint_hit(),
            1, frame_->scratch0(), frame_->scratch1());
      }
    }

    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      if (!is_contextual) {
        // Check that the receiver is a heap object.
        __ tst(receiver, Operand(kSmiTagMask));
        deferred->Branch(eq);
      }

      // Check for the_hole_value if necessary.
      // Below we rely on the number of instructions generated, and we can't
      // cope with the Check macro, which does not generate a fixed number of
      // instructions.
      Label skip, check_the_hole, cont;
      if (FLAG_debug_code && is_contextual && is_dont_delete) {
        __ b(&skip);
        __ bind(&check_the_hole);
        __ Check(ne, "DontDelete cells can't contain the hole");
        __ b(&cont);
        __ bind(&skip);
      }

#ifdef DEBUG
      int InlinedNamedLoadInstructions = 5;
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif

      Register scratch = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();

      // Check the map. The null map used below is patched by the inline cache
      // code. Therefore we can't use a LoadRoot call.
      __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ mov(scratch2, Operand(FACTORY->null_value()));
      __ cmp(scratch, scratch2);
      deferred->Branch(ne);

      if (is_contextual) {
#ifdef DEBUG
        InlinedNamedLoadInstructions += 1;
#endif
        // Load the (initially invalid) cell and get its value.
        masm()->mov(receiver, Operand(FACTORY->null_value()));
        __ ldr(receiver,
               FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset));

        deferred->set_is_dont_delete(is_dont_delete);

        if (!is_dont_delete) {
#ifdef DEBUG
          InlinedNamedLoadInstructions += 3;
#endif
          __ cmp(receiver, Operand(FACTORY->the_hole_value()));
          deferred->Branch(eq);
        } else if (FLAG_debug_code) {
#ifdef DEBUG
          InlinedNamedLoadInstructions += 3;
#endif
          __ cmp(receiver, Operand(FACTORY->the_hole_value()));
          __ b(&check_the_hole, eq);
          __ bind(&cont);
        }
      } else {
        // Initially use an invalid index. The index will be patched by the
        // inline cache code.
        __ ldr(receiver, MemOperand(receiver, 0));
      }

      // Make sure that the expected number of instructions are generated.
      // If the code before is updated, the offsets in ic-arm.cc
      // LoadIC::PatchInlinedContextualLoad and PatchInlinedLoad need
      // to be updated.
      ASSERT_EQ(InlinedNamedLoadInstructions,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
    // At this point the receiver register has the result, either from the
    // deferred code or from the inlined code.
    frame_->EmitPush(receiver);
  }
}


void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;

    // Get the value and receiver from the stack.
    frame()->PopToR0();
    Register value = r0;
    frame()->PopToR1();
    Register receiver = r1;

    DeferredReferenceSetNamedValue* deferred =
        new DeferredReferenceSetNamedValue(
            value, receiver, name, strict_mode_flag());

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are the part of the inlined
    // in-object property store code which can be patched. Therefore
    // the exact number of instructions generated must be fixed, so
    // the constant pool is blocked while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch0 = VirtualFrame::scratch0();
      Register scratch1 = VirtualFrame::scratch1();

      // Check the map. Initially use an invalid map to force a
      // failure. The map check will be patched in the runtime system.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch0, Operand(FACTORY->null_value()));
      __ cmp(scratch0, scratch1);
      deferred->Branch(ne);

      int offset = 0;
      __ str(value, MemOperand(receiver, offset));

      // Update the write barrier and record its size. We do not use
      // the RecordWrite macro here because we want the offset
      // addition instruction first to make it easy to patch.
      Label record_write_start, record_write_done;
      __ bind(&record_write_start);
      // Add offset into the object.
      __ add(scratch0, receiver, Operand(offset));
      // Test that the object is not in the new space. We cannot set
      // region marks for new space pages.
      __ InNewSpace(receiver, scratch1, eq, &record_write_done);
      // Record the actual write.
      __ RecordWriteHelper(receiver, scratch0, scratch1);
      __ bind(&record_write_done);
      // Clobber all input registers when running with the debug-code flag
      // turned on to provoke errors.
      if (FLAG_debug_code) {
        __ mov(receiver, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
      }
      // Check that this is the first inlined write barrier or that
      // this inlined write barrier has the same size as all the other
      // inlined write barriers.
      ASSERT((Isolate::Current()->inlined_write_barrier_size() == -1) ||
             (Isolate::Current()->inlined_write_barrier_size() ==
              masm()->InstructionsGeneratedSince(&record_write_start)));
      Isolate::Current()->set_inlined_write_barrier_size(
          masm()->InstructionsGeneratedSince(&record_write_start));

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
                masm()->InstructionsGeneratedSince(&check_inlined_codesize));
    }
    deferred->BindExit();
  }
  ASSERT_EQ(expected_height, frame()->height());
}


void CodeGenerator::EmitKeyedLoad() {
  if (loop_nesting() == 0) {
    Comment cmnt(masm_, "[ Load from keyed property");
    frame_->CallKeyedLoadIC();
  } else {
    // Inline the keyed load.
    Comment cmnt(masm_, "[ Inlined load from keyed property");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline(),
                        1, frame_->scratch0(), frame_->scratch1());

    // Load the key and receiver from the stack.
    bool key_is_known_smi = frame_->KnownSmiAt(0);
    Register key = frame_->PopToRegister();
    Register receiver = frame_->PopToRegister(key);

    // The deferred code expects key and receiver in registers.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(key, receiver);

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are part of the inlined load keyed
    // property code which can be patched. Therefore the exact number of
    // instructions generated needs to be fixed, so the constant pool is
    // blocked while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch1 = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();
      // Check the map. The null map used below is patched by the inline cache
      // code.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

      // Check that the key is a smi.
      if (!key_is_known_smi) {
        __ tst(key, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch2, Operand(FACTORY->null_value()));
      __ cmp(scratch1, scratch2);
      deferred->Branch(ne);

      // Get the elements array from the receiver.
      __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
      __ AssertFastElements(scratch1);

      // Check that key is within bounds. Use unsigned comparison to handle
      // negative keys.
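      // (A negative smi reinterpreted as an unsigned word is larger than
      // any valid array length, so one unsigned compare rejects it.)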
      __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
      __ cmp(scratch2, key);
      deferred->Branch(ls);  // Unsigned less equal.

      // Load and check that the result is not the hole (key is a smi).
      __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
      __ add(scratch1,
             scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ ldr(scratch1,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
      __ cmp(scratch1, scratch2);
      deferred->Branch(eq);

      __ mov(r0, scratch1);
      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
  }
}


void CodeGenerator::EmitKeyedStore(StaticType* key_type,
                                   WriteBarrierCharacter wb_info) {
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    // Inline the keyed store.
    Comment cmnt(masm_, "[ Inlined store to keyed property");

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();
    Register scratch3 = r3;

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline(),
                        1, scratch1, scratch2);


    // Load the value, key and receiver from the stack.
    bool value_is_harmless = frame_->KnownSmiAt(0);
    if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
    bool key_is_smi = frame_->KnownSmiAt(1);
    Register value = frame_->PopToRegister();
    Register key = frame_->PopToRegister(value);
    VirtualFrame::SpilledScope spilled(frame_);
    Register receiver = r2;
    frame_->EmitPop(receiver);

#ifdef DEBUG
    bool we_remembered_the_write_barrier = value_is_harmless;
#endif

    // The deferred code expects value, key and receiver in registers.
    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(
            value, key, receiver, strict_mode_flag());

    // Check that the value is a smi. As this inlined code does not set the
    // write barrier, it is only possible to store smi values.
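    // (Storing a heap pointer would require a write barrier so the GC can
    // track cross-generation references; smis are immediate values and
    // need no barrier.)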
    if (!value_is_harmless) {
      // If the value is not likely to be a Smi then let's test the fixed array
      // for new space instead. See below.
      if (wb_info == LIKELY_SMI) {
        __ tst(value, Operand(kSmiTagMask));
        deferred->Branch(ne);
#ifdef DEBUG
        we_remembered_the_write_barrier = true;
#endif
      }
    }

    if (!key_is_smi) {
      // Check that the key is a smi.
      __ tst(key, Operand(kSmiTagMask));
      deferred->Branch(ne);
    }

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // Check that the receiver is a JSArray.
    __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
    deferred->Branch(ne);

    // Get the elements array from the receiver.
    __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
    if (!value_is_harmless && wb_info != LIKELY_SMI) {
      Label ok;
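      // If the elements array itself lives in new space, a store into it
      // can never create an old-to-new pointer, so the write barrier can
      // be skipped for any value; otherwise the value must be a smi.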
      __ and_(scratch2,
              scratch1,
              Operand(ExternalReference::new_space_mask(isolate())));
      __ cmp(scratch2, Operand(ExternalReference::new_space_start(isolate())));
      __ tst(value, Operand(kSmiTagMask), ne);
      deferred->Branch(ne);
#ifdef DEBUG
      we_remembered_the_write_barrier = true;
#endif
    }
    // Check that the elements array is not a dictionary.
    __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));

    // The following instructions are part of the inlined store keyed
    // property code which can be patched. Therefore the exact number of
    // instructions generated needs to be fixed, so the constant pool is
    // blocked while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif

      // Read the fixed array map from the constant pool (not from the root
      // array) so that the value can be patched. When debugging, we patch
      // this comparison to always fail so that we hit the IC call in the
      // deferred code, which allows the debugger to break on fast-case
      // stores.
      __ mov(scratch3, Operand(FACTORY->fixed_array_map()));
      __ cmp(scratch2, scratch3);
      deferred->Branch(ne);

      // Check that the key is within bounds. Both the key and the length of
      // the JSArray are smis (because the fixed array check above ensures the
      // elements are in fast case). Use unsigned comparison to handle negative
      // keys.
      __ ldr(scratch3, FieldMemOperand(receiver, JSArray::kLengthOffset));
      __ cmp(scratch3, key);
      deferred->Branch(ls);  // Unsigned less equal.

      // Store the value.
      __ add(scratch1, scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ str(value,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    ASSERT(we_remembered_the_write_barrier);

    deferred->BindExit();
  } else {
    frame()->CallKeyedStoreIC(strict_mode_flag());
  }
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() { return true; }
#endif


#undef __
#define __ ACCESS_MASM(masm)

Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>(String::cast(*raw_name->handle()));
  }
}


void Reference::DupIfPersist() {
  if (persist_after_get_) {
    switch (type_) {
      case KEYED:
        cgen_->frame()->Dup2();
        break;
      case NAMED:
        cgen_->frame()->Dup();
        // Fall through.
      case UNLOADED:
      case ILLEGAL:
      case SLOT:
        // Do nothing.
        ;
    }
  } else {
    set_unloaded();
  }
}


void Reference::GetValue() {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      DupIfPersist();
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      Handle<String> name = GetName();
      DupIfPersist();
      cgen_->EmitNamedLoad(name, is_global);
      break;
    }

    case KEYED: {
      ASSERT(property != NULL);
      DupIfPersist();
      cgen_->EmitKeyedLoad();
      cgen_->frame()->EmitPush(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::SetValue(InitState init_state, WriteBarrierCharacter wb_info) {
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  VirtualFrame* frame = cgen_->frame();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      cgen_->EmitNamedStore(GetName(), false);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression_->AsProperty();
      ASSERT(property != NULL);
      cgen_->CodeForSourcePosition(property->position());
      cgen_->EmitKeyedStore(property->key()->type(), wb_info);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int len = 100;
  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(len);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }
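  // Produces stub names of the form "GenericBinaryOpStub_ADD_OverwriteLeft_..."
  // where the final component comes from BinaryOpIC::GetName and reflects
  // the operand types the IC has recorded so far.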

  OS::SNPrintF(Vector<char>(name_, len),
               "GenericBinaryOpStub_%s_%s%s_%s",
               op_name,
               overwrite_name,
               specialized_on_rhs_ ? "_ConstantRhs" : "",
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM