// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "bootstrapper.h"
#include "code-stubs.h"
#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "jsregexp.h"
#include "jump-target-inl.h"
#include "parser.h"
#include "regexp-macro-assembler.h"
#include "regexp-stack.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
#include "stub-cache.h"
#include "virtual-frame-inl.h"
#include "virtual-frame-arm-inl.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm_)
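// (ACCESS_MASM forwards to the MacroAssembler, so a statement such as
// '__ mov(r0, Operand(1))' below reads as 'masm_->mov(r0, Operand(1))'.)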

// -------------------------------------------------------------------------
// Platform-specific DeferredCode functions.

void DeferredCode::SaveRegisters() {
  // On ARM you either have a completely spilled frame or you
  // handle it yourself, but at the moment there's no automation
  // of registers and deferred code.
}


void DeferredCode::RestoreRegisters() {
}


// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.

void VirtualFrameRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  frame_state_->frame()->AssertIsSpilled();
}


void VirtualFrameRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
}


void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
  masm->EnterInternalFrame();
}


void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
  masm->LeaveInternalFrame();
}


// -------------------------------------------------------------------------
// CodeGenState implementation.

CodeGenState::CodeGenState(CodeGenerator* owner)
    : owner_(owner),
      previous_(owner->state()) {
  owner->set_state(this);
}


ConditionCodeGenState::ConditionCodeGenState(CodeGenerator* owner,
                                             JumpTarget* true_target,
                                             JumpTarget* false_target)
    : CodeGenState(owner),
      true_target_(true_target),
      false_target_(false_target) {
  owner->set_state(this);
}


TypeInfoCodeGenState::TypeInfoCodeGenState(CodeGenerator* owner,
                                           Slot* slot,
                                           TypeInfo type_info)
    : CodeGenState(owner),
      slot_(slot) {
  owner->set_state(this);
  old_type_info_ = owner->set_type_info(slot, type_info);
}


CodeGenState::~CodeGenState() {
  ASSERT(owner_->state() == this);
  owner_->set_state(previous_);
}


TypeInfoCodeGenState::~TypeInfoCodeGenState() {
  owner()->set_type_info(slot_, old_type_info_);
}

// -------------------------------------------------------------------------
// CodeGenerator implementation

int CodeGenerator::inlined_write_barrier_size_ = -1;

CodeGenerator::CodeGenerator(MacroAssembler* masm)
    : deferred_(8),
      masm_(masm),
      info_(NULL),
      frame_(NULL),
      allocator_(NULL),
      cc_reg_(al),
      state_(NULL),
      loop_nesting_(0),
      type_info_(NULL),
      function_return_(JumpTarget::BIDIRECTIONAL),
      function_return_is_shadowed_(false) {
}


// Calling conventions:
// fp: caller's frame pointer
// sp: stack pointer
// r1: called JS function
// cp: callee's context

void CodeGenerator::Generate(CompilationInfo* info) {
  // Record the position for debugging purposes.
  CodeForFunctionPosition(info->function());
  Comment cmnt(masm_, "[ function compiled by virtual frame code generator");

  // Initialize state.
  info_ = info;

  int slots = scope()->num_parameters() + scope()->num_stack_slots();
  ScopedVector<TypeInfo> type_info_array(slots);
  for (int i = 0; i < slots; i++) {
    type_info_array[i] = TypeInfo::Unknown();
  }
  type_info_ = &type_info_array;
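  // (Slot numbering puts parameters first, then stack locals; see
  // NumberOfSlot() below.)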

  ASSERT(allocator_ == NULL);
  RegisterAllocator register_allocator(this);
  allocator_ = &register_allocator;
  ASSERT(frame_ == NULL);
  frame_ = new VirtualFrame();
  cc_reg_ = al;

  // Adjust for function-level loop nesting.
  ASSERT_EQ(0, loop_nesting_);
  loop_nesting_ = info->is_in_loop() ? 1 : 0;

  {
    CodeGenState state(this);

    // Entry:
    // Stack: receiver, arguments
    // lr: return address
    // fp: caller's frame pointer
    // sp: stack pointer
    // r1: called JS function
    // cp: callee's context
    allocator_->Initialize();

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
      frame_->SpillAll();
      __ stop("stop-at");
    }
#endif

    frame_->Enter();
    // tos: code slot

    // Allocate space for locals and initialize them. This also checks
    // for stack overflow.
    frame_->AllocateStackSlots();

    frame_->AssertIsSpilled();
    int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (heap_slots > 0) {
      // Allocate local context.
      // Get outer context and create a new context based on it.
      __ ldr(r0, frame_->Function());
      frame_->EmitPush(r0);
      if (heap_slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(heap_slots);
        frame_->CallStub(&stub, 1);
      } else {
        frame_->CallRuntime(Runtime::kNewContext, 1);
      }

#ifdef DEBUG
      JumpTarget verified_true;
      __ cmp(r0, cp);
      verified_true.Branch(eq);
      __ stop("NewContext: r0 is expected to be the same as cp");
      verified_true.Bind();
#endif
      // Update context local.
      __ str(cp, frame_->Context());
    }

    // TODO(1241774): Improve this code:
    // 1) only needed if we have a context
    // 2) no need to recompute context ptr every single time
    // 3) don't copy parameter operand code from SlotOperand!
    {
      Comment cmnt2(masm_, "[ copy context parameters into .context");
      // Note that iteration order is relevant here! If we have the same
      // parameter twice (e.g., function (x, y, x)), and that parameter
      // needs to be copied into the context, it must be the last argument
      // passed to the parameter that needs to be copied. This is a rare
      // case so we don't check for it, instead we rely on the copying
      // order: such a parameter is copied repeatedly into the same
      // context location and thus the last value is what is seen inside
      // the function.
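      // (For example, in 'function (x, y, x)' called as f(1, 2, 3), the
      // context slot for 'x' is written twice and ends up holding 3.)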
      frame_->AssertIsSpilled();
      for (int i = 0; i < scope()->num_parameters(); i++) {
        Variable* par = scope()->parameter(i);
        Slot* slot = par->AsSlot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          ASSERT(!scope()->is_global_scope());  // No params in global scope.
          __ ldr(r1, frame_->ParameterAt(i));
          // Loads r2 with context; used below in RecordWrite.
          __ str(r1, SlotOperand(slot, r2));
          // Load the offset into r3.
          int slot_offset =
              FixedArray::kHeaderSize + slot->index() * kPointerSize;
          __ RecordWrite(r2, Operand(slot_offset), r3, r1);
        }
      }
    }

    // Store the arguments object. This must happen after context
    // initialization because the arguments object may be stored in
    // the context.
    if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
      StoreArgumentsObject(true);
    }

    // Initialize ThisFunction reference if present.
    if (scope()->is_function_scope() && scope()->function() != NULL) {
      frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
      StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
    }

    // Initialize the function return target after the locals are set
    // up, because it needs the expected frame height from the frame.
    function_return_.SetExpectedHeight();
    function_return_is_shadowed_ = false;

    // Generate code to 'execute' declarations and initialize functions
    // (source elements). In case of an illegal redeclaration we need to
    // handle that instead of processing the declarations.
    if (scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ illegal redeclarations");
      scope()->VisitIllegalRedeclaration(this);
    } else {
      Comment cmnt(masm_, "[ declarations");
      ProcessDeclarations(scope()->declarations());
      // Bail out if a stack-overflow exception occurred when processing
      // declarations.
      if (HasStackOverflow()) return;
    }

    if (FLAG_trace) {
      frame_->CallRuntime(Runtime::kTraceEnter, 0);
      // Ignore the return value.
    }

    // Compile the body of the function in a vanilla state. Don't
    // bother compiling all the code if the scope has an illegal
    // redeclaration.
    if (!scope()->HasIllegalRedeclaration()) {
      Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
      bool is_builtin = Bootstrapper::IsActive();
      bool should_trace =
          is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
      if (should_trace) {
        frame_->CallRuntime(Runtime::kDebugTrace, 0);
        // Ignore the return value.
      }
#endif
      VisitStatements(info->function()->body());
    }
  }

  // Handle the return from the function.
  if (has_valid_frame()) {
    // If there is a valid frame, control flow can fall off the end of
    // the body. In that case there is an implicit return statement.
    ASSERT(!function_return_is_shadowed_);
    frame_->PrepareForReturn();
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    if (function_return_.is_bound()) {
      function_return_.Jump();
    } else {
      function_return_.Bind();
      GenerateReturnSequence();
    }
  } else if (function_return_.is_linked()) {
    // If the return target has dangling jumps to it, then we have not
    // yet generated the return sequence. This can happen when (a)
    // control does not flow off the end of the body so we did not
    // compile an artificial return statement just above, and (b) there
    // are return statements in the body but (c) they are all shadowed.
    function_return_.Bind();
    GenerateReturnSequence();
  }

  // Adjust for function-level loop nesting.
  ASSERT(loop_nesting_ == (info->is_in_loop() ? 1 : 0));
  loop_nesting_ = 0;

  // Code generation state must be reset.
  ASSERT(!has_cc());
  ASSERT(state_ == NULL);
  ASSERT(loop_nesting() == 0);
  ASSERT(!function_return_is_shadowed_);
  function_return_.Unuse();
  DeleteFrame();

  // Process any deferred code using the register allocator.
  if (!HasStackOverflow()) {
    ProcessDeferred();
  }

  allocator_ = NULL;
  type_info_ = NULL;
}


int CodeGenerator::NumberOfSlot(Slot* slot) {
  if (slot == NULL) return kInvalidSlotNumber;
  switch (slot->type()) {
    case Slot::PARAMETER:
      return slot->index();
    case Slot::LOCAL:
      return slot->index() + scope()->num_parameters();
    default:
      break;
  }
  return kInvalidSlotNumber;
}


MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
  // Currently, this assertion will fail if we try to assign to
  // a constant variable that is constant because it is read-only
  // (such as the variable referring to a named function expression).
  // We need to implement assignments to read-only variables.
  // Ideally, we should do this during AST generation (by converting
  // such assignments into expression statements); however, in general
  // we may not be able to make the decision until past AST generation,
  // that is when the entire program is known.
  ASSERT(slot != NULL);
  int index = slot->index();
  switch (slot->type()) {
    case Slot::PARAMETER:
      return frame_->ParameterAt(index);

    case Slot::LOCAL:
      return frame_->LocalAt(index);

    case Slot::CONTEXT: {
      // Follow the context chain if necessary.
      ASSERT(!tmp.is(cp));  // do not overwrite context register
      Register context = cp;
      int chain_length = scope()->ContextChainLength(slot->var()->scope());
      for (int i = 0; i < chain_length; i++) {
        // Load the closure.
        // (All contexts, even 'with' contexts, have a closure,
        // and it is the same for all contexts inside a function.
        // There is no need to go to the function context first.)
        __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
        // Load the function context (which is the incoming, outer context).
        __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
        context = tmp;
      }
      // We may have a 'with' context now. Get the function context.
      // (In fact this mov may never be needed, since the scope analysis
      // may not permit a direct context access in this case and thus we are
      // always at a function context. However it is safe to dereference be-
      // cause the function context of a function context is itself. Before
      // deleting this mov we should try to create a counter-example first,
      // though...)
      __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
      return ContextOperand(tmp, index);
    }

    default:
      UNREACHABLE();
      return MemOperand(r0, 0);
  }
}


MemOperand CodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Register tmp,
    Register tmp2,
    JumpTarget* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = cp;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
        __ tst(tmp2, tmp2);
        slow->Branch(ne);
      }
      __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
      context = tmp;
    }
  }
  // Check that last extension is NULL.
  __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
  __ tst(tmp2, tmp2);
  slow->Branch(ne);
  __ ldr(tmp, ContextOperand(context, Context::FCONTEXT_INDEX));
  return ContextOperand(tmp, slot->index());
}


// Loads a value on TOS. If it is a boolean value, the result may have been
// (partially) translated into branches, or it may have set the condition
// code register. If force_cc is set, the value is forced to set the
// condition code register and no value is pushed. If the condition code
// register was set, has_cc() is true and cc_reg_ contains the condition to
// test for 'true'.
void CodeGenerator::LoadCondition(Expression* x,
                                  JumpTarget* true_target,
                                  JumpTarget* false_target,
                                  bool force_cc) {
  ASSERT(!has_cc());
  int original_height = frame_->height();

  { ConditionCodeGenState new_state(this, true_target, false_target);
    Visit(x);

    // If we hit a stack overflow, we may not have actually visited
    // the expression. In that case, we ensure that we have a
    // valid-looking frame state because we will continue to generate
    // code as we unwind the C++ stack.
    //
    // It's possible to have both a stack overflow and a valid frame
    // state (eg, a subexpression overflowed, visiting it returned
    // with a dummied frame state, and visiting this expression
    // returned with a normal-looking state).
    if (HasStackOverflow() &&
        has_valid_frame() &&
        !has_cc() &&
        frame_->height() == original_height) {
      true_target->Jump();
    }
  }
  if (force_cc && frame_ != NULL && !has_cc()) {
    // Convert the TOS value to a boolean in the condition code register.
    ToBoolean(true_target, false_target);
  }
  ASSERT(!force_cc || !has_valid_frame() || has_cc());
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::Load(Expression* expr) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator. A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  frame_->AssertIsNotSpilled();
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  JumpTarget true_target;
  JumpTarget false_target;
  LoadCondition(expr, &true_target, &false_target, false);

  if (has_cc()) {
    // Convert cc_reg_ into a boolean value.
    JumpTarget loaded;
    JumpTarget materialize_true;
    materialize_true.Branch(cc_reg_);
    frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    loaded.Jump();
    materialize_true.Bind();
    frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    loaded.Bind();
    cc_reg_ = al;
  }

  if (true_target.is_linked() || false_target.is_linked()) {
    // We have at least one condition value that has been "translated"
    // into a branch, thus it needs to be loaded explicitly.
    JumpTarget loaded;
    if (frame_ != NULL) {
      loaded.Jump();  // Don't lose the current TOS.
    }
    bool both = true_target.is_linked() && false_target.is_linked();
    // Load "true" if necessary.
    if (true_target.is_linked()) {
      true_target.Bind();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }
    // If both "true" and "false" need to be loaded jump across the code for
    // "false".
    if (both) {
      loaded.Jump();
    }
    // Load "false" if necessary.
    if (false_target.is_linked()) {
      false_target.Bind();
      frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
    }
    // A value is loaded on all paths reaching this point.
    loaded.Bind();
  }
  ASSERT(has_valid_frame());
  ASSERT(!has_cc());
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::LoadGlobal() {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, GlobalObjectOperand());
  frame_->EmitPush(reg);
}


void CodeGenerator::LoadGlobalReceiver(Register scratch) {
  Register reg = frame_->GetTOSRegister();
  __ ldr(reg, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(reg,
         FieldMemOperand(reg, GlobalObject::kGlobalReceiverOffset));
  frame_->EmitPush(reg);
}


ArgumentsAllocationMode CodeGenerator::ArgumentsMode() {
  if (scope()->arguments() == NULL) return NO_ARGUMENTS_ALLOCATION;
  ASSERT(scope()->arguments_shadow() != NULL);
  // We don't want to do lazy arguments allocation for functions that
  // have heap-allocated contexts, because it interferes with the
  // uninitialized const tracking in the context objects.
  return (scope()->num_heap_slots() > 0)
      ? EAGER_ARGUMENTS_ALLOCATION
      : LAZY_ARGUMENTS_ALLOCATION;
}


void CodeGenerator::StoreArgumentsObject(bool initial) {
  ArgumentsAllocationMode mode = ArgumentsMode();
  ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

  Comment cmnt(masm_, "[ store arguments object");
  if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
    // When using lazy arguments allocation, we store the hole value
    // as a sentinel indicating that the arguments object hasn't been
    // allocated yet.
    frame_->EmitPushRoot(Heap::kArgumentsMarkerRootIndex);
  } else {
    frame_->SpillAll();
    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
    __ ldr(r2, frame_->Function());
    // The receiver is below the arguments, the return address, and the
    // frame pointer on the stack.
    const int kReceiverDisplacement = 2 + scope()->num_parameters();
    __ add(r1, fp, Operand(kReceiverDisplacement * kPointerSize));
    __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));
    frame_->Adjust(3);
    __ Push(r2, r1, r0);
    frame_->CallStub(&stub, 3);
    frame_->EmitPush(r0);
  }

  Variable* arguments = scope()->arguments();
  Variable* shadow = scope()->arguments_shadow();
  ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
  ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
  JumpTarget done;
  if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
    // We have to skip storing into the arguments slot if it has
    // already been written to. This can happen if a function
    // has a local variable named 'arguments'.
    LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
    Register arguments = frame_->PopToRegister();
    __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
    __ cmp(arguments, ip);
    done.Branch(ne);
  }
  StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
  if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
  StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
}


void CodeGenerator::LoadTypeofExpression(Expression* expr) {
  // Special handling of identifiers as subexpressions of typeof.
  Variable* variable = expr->AsVariableProxy()->AsVariable();
  if (variable != NULL && !variable->is_this() && variable->is_global()) {
    // For a global variable we build the property reference
    // <global>.<variable> and perform a (regular non-contextual) property
    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
    Property property(&global, &key, RelocInfo::kNoPosition);
    Reference ref(this, &property);
    ref.GetValue();
  } else if (variable != NULL && variable->AsSlot() != NULL) {
    // For a variable that rewrites to a slot, we signal it is the immediate
    // subexpression of a typeof.
    LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
  } else {
    // Anything else can be handled normally.
    Load(expr);
  }
}


Reference::Reference(CodeGenerator* cgen,
                     Expression* expression,
                     bool persist_after_get)
    : cgen_(cgen),
      expression_(expression),
      type_(ILLEGAL),
      persist_after_get_(persist_after_get) {
  // We generally assume that we are not in a spilled scope for most
  // of the code generator. A failure to ensure this caused issue 815
  // and this assert is designed to catch similar issues.
  cgen->frame()->AssertIsNotSpilled();
  cgen->LoadReference(this);
}


Reference::~Reference() {
  ASSERT(is_unloaded() || is_illegal());
}


void CodeGenerator::LoadReference(Reference* ref) {
  Comment cmnt(masm_, "[ LoadReference");
  Expression* e = ref->expression();
  Property* property = e->AsProperty();
  Variable* var = e->AsVariableProxy()->AsVariable();

  if (property != NULL) {
    // The expression is either a property or a variable proxy that rewrites
    // to a property.
    Load(property->obj());
    if (property->key()->IsPropertyName()) {
      ref->set_type(Reference::NAMED);
    } else {
      Load(property->key());
      ref->set_type(Reference::KEYED);
    }
  } else if (var != NULL) {
    // The expression is a variable proxy that does not rewrite to a
    // property. Global variables are treated as named property references.
    if (var->is_global()) {
      LoadGlobal();
      ref->set_type(Reference::NAMED);
    } else {
      ASSERT(var->AsSlot() != NULL);
      ref->set_type(Reference::SLOT);
    }
  } else {
    // Anything else is a runtime error.
    Load(e);
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
  }
}


void CodeGenerator::UnloadReference(Reference* ref) {
  int size = ref->size();
  ref->set_unloaded();
  if (size == 0) return;

  // Pop a reference from the stack while preserving TOS.
  VirtualFrame::RegisterAllocationScope scope(this);
  Comment cmnt(masm_, "[ UnloadReference");
  if (size > 0) {
    Register tos = frame_->PopToRegister();
    frame_->Drop(size);
    frame_->EmitPush(tos);
  }
}


// ECMA-262, section 9.2, page 30: ToBoolean(). Convert the given
// register to a boolean in the condition code register. The code
// may jump to 'false_target' in case the register converts to 'false'.
void CodeGenerator::ToBoolean(JumpTarget* true_target,
                              JumpTarget* false_target) {
  // Note: The generated code snippet does not change stack variables.
  //       Only the condition code should be set.
  bool known_smi = frame_->KnownSmiAt(0);
  Register tos = frame_->PopToRegister();

  // Fast case checks

  // Check if the value is 'false'.
  if (!known_smi) {
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);

    // Check if the value is 'true'.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(tos, ip);
    true_target->Branch(eq);

    // Check if the value is 'undefined'.
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, ip);
    false_target->Branch(eq);
  }

  // Check if the value is a smi.
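  // (Zero is the only smi that converts to 'false'. In the known-smi case
  // we fall straight through to 'cc_reg_ = ne' below, so the value is 'true'
  // iff it is not zero.)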
  __ cmp(tos, Operand(Smi::FromInt(0)));

  if (!known_smi) {
    false_target->Branch(eq);
    __ tst(tos, Operand(kSmiTagMask));
    true_target->Branch(eq);

    // Slow case.
    if (CpuFeatures::IsSupported(VFP3)) {
      CpuFeatures::Scope scope(VFP3);
      // Implements the slow case by using ToBooleanStub.
      // The ToBooleanStub takes a single argument, and
      // returns a non-zero value for true, or zero for false.
      // Both the argument value and the return value use the
      // register assigned to tos_.
      ToBooleanStub stub(tos);
      frame_->CallStub(&stub, 0);
      // Convert the result in "tos" to a condition code.
      __ cmp(tos, Operand(0, RelocInfo::NONE));
    } else {
      // Implements slow case by calling the runtime.
      frame_->EmitPush(tos);
      frame_->CallRuntime(Runtime::kToBool, 1);
      // Convert the result (r0) to a condition code.
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(r0, ip);
    }
  }

  cc_reg_ = ne;
}


void CodeGenerator::GenericBinaryOperation(Token::Value op,
                                           OverwriteMode overwrite_mode,
                                           GenerateInlineSmi inline_smi,
                                           int constant_rhs) {
  // top of virtual frame: y
  // 2nd elt. on virtual frame: x
  // result: top of virtual frame

  // Stub is entered with a call: 'return address' is in lr.
  switch (op) {
    case Token::ADD:
    case Token::SUB:
      if (inline_smi) {
        JumpTarget done;
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register scratch = VirtualFrame::scratch0();
        __ orr(scratch, rhs, Operand(lhs));
        // Check they are both small and positive.
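        // (kSmiTagMask catches a non-smi tag in either operand; 0xc0000000
        // catches an operand that is negative or too large, for which the
        // optimistic smi add/sub below could overflow.)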
        __ tst(scratch, Operand(kSmiTagMask | 0xc0000000));
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        STATIC_ASSERT(kSmiTag == 0);
        if (op == Token::ADD) {
          __ add(r0, lhs, Operand(rhs), LeaveCC, eq);
        } else {
          __ sub(r0, lhs, Operand(rhs), LeaveCC, eq);
        }
        done.Branch(eq);
        GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
        frame_->SpillAll();
        frame_->CallStub(&stub, 0);
        done.Bind();
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
      if (inline_smi) {
        bool rhs_is_smi = frame_->KnownSmiAt(0);
        bool lhs_is_smi = frame_->KnownSmiAt(1);
        Register rhs = frame_->PopToRegister();
        Register lhs = frame_->PopToRegister(rhs);
        Register smi_test_reg;
        Condition cond;
        if (!rhs_is_smi || !lhs_is_smi) {
          if (rhs_is_smi) {
            smi_test_reg = lhs;
          } else if (lhs_is_smi) {
            smi_test_reg = rhs;
          } else {
            smi_test_reg = VirtualFrame::scratch0();
            __ orr(smi_test_reg, rhs, Operand(lhs));
          }
          // Check they are both Smis.
          __ tst(smi_test_reg, Operand(kSmiTagMask));
          cond = eq;
        } else {
          cond = al;
        }
        ASSERT(rhs.is(r0) || lhs.is(r0));  // r0 is free now.
        if (op == Token::BIT_OR) {
          __ orr(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else if (op == Token::BIT_AND) {
          __ and_(r0, lhs, Operand(rhs), LeaveCC, cond);
        } else {
          ASSERT(op == Token::BIT_XOR);
          STATIC_ASSERT(kSmiTag == 0);
          __ eor(r0, lhs, Operand(rhs), LeaveCC, cond);
        }
        if (cond != al) {
          JumpTarget done;
          done.Branch(cond);
          GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
          frame_->SpillAll();
          frame_->CallStub(&stub, 0);
          done.Bind();
        }
        frame_->EmitPush(r0);
        break;
      } else {
        // Fall through!
      }
    case Token::MUL:
    case Token::DIV:
    case Token::MOD:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      Register rhs = frame_->PopToRegister();
      Register lhs = frame_->PopToRegister(rhs);  // Don't pop to rhs register.
      GenericBinaryOpStub stub(op, overwrite_mode, lhs, rhs, constant_rhs);
      frame_->SpillAll();
      frame_->CallStub(&stub, 0);
      frame_->EmitPush(r0);
      break;
    }

    case Token::COMMA: {
      Register scratch = frame_->PopToRegister();
      // Simply discard left value.
      frame_->Drop();
      frame_->EmitPush(scratch);
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }
}


class DeferredInlineSmiOperation: public DeferredCode {
 public:
  DeferredInlineSmiOperation(Token::Value op,
                             int value,
                             bool reversed,
                             OverwriteMode overwrite_mode,
                             Register tos)
      : op_(op),
        value_(value),
        reversed_(reversed),
        overwrite_mode_(overwrite_mode),
        tos_register_(tos) {
    set_comment("[ DeferredInlinedSmiOperation");
  }

  virtual void Generate();
  // This stub makes explicit calls to SaveRegisters(), RestoreRegisters() and
  // Exit(). Currently on ARM SaveRegisters() and RestoreRegisters() are empty
  // methods, it is the responsibility of the deferred code to save and restore
  // registers.
  virtual bool AutoSaveAndRestore() { return false; }

  void JumpToNonSmiInput(Condition cond);
  void JumpToAnswerOutOfRange(Condition cond);

 private:
  void GenerateNonSmiInput();
  void GenerateAnswerOutOfRange();
  void WriteNonSmiAnswer(Register answer,
                         Register heap_number,
                         Register scratch);

  Token::Value op_;
  int value_;
  bool reversed_;
  OverwriteMode overwrite_mode_;
  Register tos_register_;
  Label non_smi_input_;
  Label answer_out_of_range_;
};

// For bit operations we try harder and handle the case where the input is
// not a Smi but a 32-bit integer, without calling the generic stub.
void DeferredInlineSmiOperation::JumpToNonSmiInput(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  __ b(cond, &non_smi_input_);
}

// For bit operations the result is always 32 bits, so we handle the case
// where the result does not fit in a Smi without calling the generic stub.
void DeferredInlineSmiOperation::JumpToAnswerOutOfRange(Condition cond) {
  ASSERT(Token::IsBitOp(op_));

  if ((op_ == Token::SHR) && !CpuFeatures::IsSupported(VFP3)) {
    // >>> requires an unsigned to double conversion and the non VFP code
    // does not support this conversion.
    __ b(cond, entry_label());
  } else {
    __ b(cond, &answer_out_of_range_);
  }
}


// On entry the non-constant side of the binary operation is in tos_register_
// and the constant smi side is nowhere. The tos_register_ is not used by the
// virtual frame. On exit the answer is in the tos_register_ and the virtual
// frame is unchanged.
void DeferredInlineSmiOperation::Generate() {
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register lhs = r1;
  Register rhs = r0;
  switch (op_) {
    case Token::ADD: {
      // Revert optimistic add.
      if (reversed_) {
        __ sub(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ sub(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    case Token::SUB: {
      // Revert optimistic sub.
      if (reversed_) {
        __ rsb(r0, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      } else {
        __ add(r1, tos_register_, Operand(Smi::FromInt(value_)));
        __ mov(r0, Operand(Smi::FromInt(value_)));
      }
      break;
    }

    // For these operations there is no optimistic operation that needs to be
    // reverted.
    case Token::MUL:
    case Token::MOD:
    case Token::BIT_OR:
    case Token::BIT_XOR:
    case Token::BIT_AND:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR: {
      if (tos_register_.is(r1)) {
        __ mov(r0, Operand(Smi::FromInt(value_)));
      } else {
        ASSERT(tos_register_.is(r0));
        __ mov(r1, Operand(Smi::FromInt(value_)));
      }
      if (reversed_ == tos_register_.is(r1)) {
        lhs = r0;
        rhs = r1;
      }
      break;
    }

    default:
      // Other cases should have been handled before this point.
      UNREACHABLE();
      break;
  }

  GenericBinaryOpStub stub(op_, overwrite_mode_, lhs, rhs, value_);
  __ CallStub(&stub);

  // The generic stub returns its value in r0, but that's not
  // necessarily what we want. We want whatever the inlined code
  // expected, which is that the answer is in the same register as
  // the operand was.
  __ Move(tos_register_, r0);

  // The tos register was not in use for the virtual frame that we
  // came into this function with, so we can merge back to that frame
  // without trashing it.
  copied_frame.MergeTo(frame_state()->frame());

  Exit();

  if (non_smi_input_.is_linked()) {
    GenerateNonSmiInput();
  }

  if (answer_out_of_range_.is_linked()) {
    GenerateAnswerOutOfRange();
  }
}


// Convert and write the integer answer into heap_number.
void DeferredInlineSmiOperation::WriteNonSmiAnswer(Register answer,
                                                   Register heap_number,
                                                   Register scratch) {
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);
    __ vmov(s0, answer);
    if (op_ == Token::SHR) {
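      // (>>> yields an unsigned 32-bit result, so convert unsigned to
      // double; the other bit operations yield a signed 32-bit result.)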
      __ vcvt_f64_u32(d0, s0);
    } else {
      __ vcvt_f64_s32(d0, s0);
    }
    __ sub(scratch, heap_number, Operand(kHeapObjectTag));
    __ vstr(d0, scratch, HeapNumber::kValueOffset);
  } else {
    WriteInt32ToHeapNumberStub stub(answer, heap_number, scratch);
    __ CallStub(&stub);
  }
}


void DeferredInlineSmiOperation::GenerateNonSmiInput() {
  // We know the left hand side is not a Smi and the right hand side is an
  // immediate value (value_) which can be represented as a Smi. We only
  // handle bit operations.
  ASSERT(Token::IsBitOp(op_));

  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&non_smi_input_);
  if (FLAG_debug_code) {
    __ AbortIfSmi(tos_register_);
  }

  // This routine uses the registers from r2 to r6. At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  Register heap_number_map = r7;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  __ ldr(r3, FieldMemOperand(tos_register_, HeapNumber::kMapOffset));
  __ cmp(r3, heap_number_map);
  // Not a number, fall back to the GenericBinaryOpStub.
  __ b(ne, entry_label());

  Register int32 = r2;
  // Not a 32-bit signed int, fall back to the GenericBinaryOpStub.
  __ ConvertToInt32(tos_register_, int32, r4, r5, d0, entry_label());

  // tos_register_ (r0 or r1): Original heap number.
  // int32: signed 32-bit int.

  Label result_not_a_smi;
  int shift_value = value_ & 0x1f;
  switch (op_) {
    case Token::BIT_OR: __ orr(int32, int32, Operand(value_)); break;
    case Token::BIT_XOR: __ eor(int32, int32, Operand(value_)); break;
    case Token::BIT_AND: __ and_(int32, int32, Operand(value_)); break;
    case Token::SAR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, ASR, shift_value));
      }
      break;
    case Token::SHR:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSR, shift_value), SetCC);
      } else {
        // SHR is special because it is required to produce a positive answer.
        __ cmp(int32, Operand(0, RelocInfo::NONE));
      }
      if (CpuFeatures::IsSupported(VFP3)) {
        __ b(mi, &result_not_a_smi);
      } else {
        // Non VFP code cannot convert from unsigned to double, so fall back
        // to GenericBinaryOpStub.
        __ b(mi, entry_label());
      }
      break;
    case Token::SHL:
      ASSERT(!reversed_);
      if (shift_value != 0) {
        __ mov(int32, Operand(int32, LSL, shift_value));
      }
      break;
    default: UNREACHABLE();
  }
  // Check that the *signed* result fits in a smi. Not necessary for AND
  // with a non-negative mask, SAR if the shift is more than 0, or SHR if
  // the shift is more than 1.
  if (!( ((op_ == Token::BIT_AND) && (value_ >= 0)) ||
         ((op_ == Token::SAR) && (shift_value > 0)) ||
         ((op_ == Token::SHR) && (shift_value > 1)))) {
    __ add(r3, int32, Operand(0x40000000), SetCC);
    __ b(mi, &result_not_a_smi);
  }
  __ mov(tos_register_, Operand(int32, LSL, kSmiTagSize));
  Exit();

  if (result_not_a_smi.is_linked()) {
    __ bind(&result_not_a_smi);
    if (overwrite_mode_ != OVERWRITE_LEFT) {
      ASSERT((overwrite_mode_ == NO_OVERWRITE) ||
             (overwrite_mode_ == OVERWRITE_RIGHT));
      // If the allocation fails, fall back to the GenericBinaryOpStub.
      __ AllocateHeapNumber(r4, r5, r6, heap_number_map, entry_label());
      // Nothing can go wrong now, so overwrite tos.
      __ mov(tos_register_, Operand(r4));
    }

    // int32: answer as signed 32-bit integer.
    // tos_register_: Heap number to write the answer into.
    WriteNonSmiAnswer(int32, tos_register_, r3);

    Exit();
  }
}


void DeferredInlineSmiOperation::GenerateAnswerOutOfRange() {
  // The inputs to a bitwise operation were Smis but the result cannot fit
  // into a Smi, so we store it into a heap number. VirtualFrame::scratch0()
  // holds the untagged result to be converted. tos_register_ contains the
  // input. See the calls to JumpToAnswerOutOfRange to see how we got here.
  ASSERT(Token::IsBitOp(op_));
  ASSERT(!reversed_);

  Register untagged_result = VirtualFrame::scratch0();

  if (FLAG_debug_code) {
    __ Abort("Should not fall through!");
  }

  __ bind(&answer_out_of_range_);
  if (((value_ & 0x1f) == 0) && (op_ == Token::SHR)) {
    // >>> 0 is a special case where the untagged_result register is not set
    // up yet. We untag the input to get it.
    __ mov(untagged_result, Operand(tos_register_, ASR, kSmiTagSize));
  }

  // This routine uses the registers from r2 to r6. At the moment they are
  // not used by the register allocator, but when they are it should use
  // SpillAll and MergeTo like DeferredInlineSmiOperation::Generate() above.

  // Allocate the result heap number.
  Register heap_number_map = VirtualFrame::scratch1();
  Register heap_number = r4;
  __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  // If the allocation fails, fall back to the GenericBinaryOpStub.
  __ AllocateHeapNumber(heap_number, r5, r6, heap_number_map, entry_label());
  WriteNonSmiAnswer(untagged_result, heap_number, r3);
  __ mov(tos_register_, Operand(heap_number));

  Exit();
}


static bool PopCountLessThanEqual2(unsigned int x) {
  x &= x - 1;
  return (x & (x - 1)) == 0;
}
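// ('x &= x - 1' clears the lowest set bit, so after clearing it once the
// value must be zero or a power of two for the population count to be <= 2.
// E.g. 6 (0b110) -> 0b100, a power of two, so true; 7 (0b111) -> 0b110,
// which is not, so false.)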


// Returns the index of the lowest bit set.
static int BitPosition(unsigned x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}
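// (Scans a nibble at a time, then bit by bit: BitPosition(0x50) skips the
// zero low nibble (bit_posn = 4, x = 5) and returns 4. The loops assume
// x != 0, which the callers below guarantee.)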


// Can we multiply by x with at most two shifts and an add?
// This answers yes to all integers from 2 to 10.
static bool IsEasyToMultiplyBy(int x) {
  if (x < 2) return false;  // Avoid special cases.
  if (x > (Smi::kMaxValue + 1) >> 2) return false;  // Almost always overflows.
  if (IsPowerOf2(x)) return true;  // Simple shift.
  if (PopCountLessThanEqual2(x)) return true;  // Shift and add and shift.
  if (IsPowerOf2(x + 1)) return true;  // Patterns like 11111.
  return false;
}
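// (E.g. 10 (0b1010) has two bits set, so it is a shift-add-shift; 7 (0b111)
// is one less than a power of two, so it is handled with a single rsb.)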


// Can multiply by anything that IsEasyToMultiplyBy returns true for.
// Source and destination may be the same register. This routine does
// not set carry and overflow the way a mul instruction would.
static void InlineMultiplyByKnownInt(MacroAssembler* masm,
                                     Register source,
                                     Register destination,
                                     int known_int) {
  if (IsPowerOf2(known_int)) {
    masm->mov(destination, Operand(source, LSL, BitPosition(known_int)));
  } else if (PopCountLessThanEqual2(known_int)) {
    int first_bit = BitPosition(known_int);
    int second_bit = BitPosition(known_int ^ (1 << first_bit));
    masm->add(destination, source,
              Operand(source, LSL, second_bit - first_bit));
    if (first_bit != 0) {
      masm->mov(destination, Operand(destination, LSL, first_bit));
    }
  } else {
    ASSERT(IsPowerOf2(known_int + 1));  // Patterns like 1111.
    int the_bit = BitPosition(known_int + 1);
    masm->rsb(destination, source, Operand(source, LSL, the_bit));
  }
}
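// (E.g. for known_int == 10: first_bit = 1, second_bit = 3, so we emit
// 'add dst, src, src, LSL #2' (dst = 5 * src) then 'mov dst, dst, LSL #1'
// (dst = 10 * src). For known_int == 7: 'rsb dst, src, src, LSL #3'
// computes 8 * src - src = 7 * src.)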
1279
1280
Steve Blocka7e24c12009-10-30 11:49:00 +00001281void CodeGenerator::SmiOperation(Token::Value op,
1282 Handle<Object> value,
1283 bool reversed,
1284 OverwriteMode mode) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001285 int int_value = Smi::cast(*value)->value();
1286
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001287 bool both_sides_are_smi = frame_->KnownSmiAt(0);
1288
Steve Block6ded16b2010-05-10 14:33:55 +01001289 bool something_to_inline;
1290 switch (op) {
1291 case Token::ADD:
1292 case Token::SUB:
1293 case Token::BIT_AND:
1294 case Token::BIT_OR:
1295 case Token::BIT_XOR: {
1296 something_to_inline = true;
1297 break;
1298 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001299 case Token::SHL: {
1300 something_to_inline = (both_sides_are_smi || !reversed);
1301 break;
1302 }
Steve Block6ded16b2010-05-10 14:33:55 +01001303 case Token::SHR:
1304 case Token::SAR: {
1305 if (reversed) {
1306 something_to_inline = false;
1307 } else {
1308 something_to_inline = true;
1309 }
1310 break;
1311 }
1312 case Token::MOD: {
1313 if (reversed || int_value < 2 || !IsPowerOf2(int_value)) {
1314 something_to_inline = false;
1315 } else {
1316 something_to_inline = true;
1317 }
1318 break;
1319 }
1320 case Token::MUL: {
1321 if (!IsEasyToMultiplyBy(int_value)) {
1322 something_to_inline = false;
1323 } else {
1324 something_to_inline = true;
1325 }
1326 break;
1327 }
1328 default: {
1329 something_to_inline = false;
1330 break;
1331 }
1332 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001333
Steve Block6ded16b2010-05-10 14:33:55 +01001334 if (!something_to_inline) {
1335 if (!reversed) {
1336 // Push the rhs onto the virtual frame by putting it in a TOS register.
1337 Register rhs = frame_->GetTOSRegister();
1338 __ mov(rhs, Operand(value));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001339 frame_->EmitPush(rhs, TypeInfo::Smi());
1340 GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI, int_value);
Steve Block6ded16b2010-05-10 14:33:55 +01001341 } else {
1342      // Pop the rhs, then push lhs and rhs in the right order. This performs
1343      // at most one pop; the rest takes place in TOS registers.
1344 Register lhs = frame_->GetTOSRegister(); // Get reg for pushing.
1345 Register rhs = frame_->PopToRegister(lhs); // Don't use lhs for this.
1346 __ mov(lhs, Operand(value));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001347 frame_->EmitPush(lhs, TypeInfo::Smi());
1348 TypeInfo t = both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Unknown();
1349 frame_->EmitPush(rhs, t);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001350 GenericBinaryOperation(op, mode, GENERATE_INLINE_SMI,
1351 GenericBinaryOpStub::kUnknownIntValue);
Steve Block6ded16b2010-05-10 14:33:55 +01001352 }
1353 return;
1354 }
1355
1356  // We move the top of the stack to a register (normally no move is involved).
1357 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00001358 switch (op) {
1359 case Token::ADD: {
1360 DeferredCode* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001361 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001362
Steve Block6ded16b2010-05-10 14:33:55 +01001363 __ add(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001364 deferred->Branch(vs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001365 if (!both_sides_are_smi) {
1366 __ tst(tos, Operand(kSmiTagMask));
1367 deferred->Branch(ne);
1368 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001369 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001370 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001371 break;
1372 }
1373
1374 case Token::SUB: {
1375 DeferredCode* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001376 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001377
1378 if (reversed) {
Steve Block6ded16b2010-05-10 14:33:55 +01001379 __ rsb(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001380 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01001381 __ sub(tos, tos, Operand(value), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00001382 }
1383 deferred->Branch(vs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001384 if (!both_sides_are_smi) {
1385 __ tst(tos, Operand(kSmiTagMask));
1386 deferred->Branch(ne);
1387 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001388 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001389 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001390 break;
1391 }
1392
1393
1394 case Token::BIT_OR:
1395 case Token::BIT_XOR:
1396 case Token::BIT_AND: {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001397 if (both_sides_are_smi) {
1398 switch (op) {
1399 case Token::BIT_OR: __ orr(tos, tos, Operand(value)); break;
1400 case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001401 case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001402 default: UNREACHABLE();
1403 }
1404 frame_->EmitPush(tos, TypeInfo::Smi());
1405 } else {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001406 DeferredInlineSmiOperation* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001407 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
1408 __ tst(tos, Operand(kSmiTagMask));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001409 deferred->JumpToNonSmiInput(ne);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001410 switch (op) {
1411 case Token::BIT_OR: __ orr(tos, tos, Operand(value)); break;
1412 case Token::BIT_XOR: __ eor(tos, tos, Operand(value)); break;
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01001413 case Token::BIT_AND: __ And(tos, tos, Operand(value)); break;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001414 default: UNREACHABLE();
1415 }
1416 deferred->BindExit();
Ben Murdochb0fe1622011-05-05 13:52:32 +01001417 TypeInfo result_type =
1418 (op == Token::BIT_AND) ? TypeInfo::Smi() : TypeInfo::Integer32();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001419 frame_->EmitPush(tos, result_type);
Steve Blocka7e24c12009-10-30 11:49:00 +00001420 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001421 break;
1422 }
1423
1424 case Token::SHL:
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001425 if (reversed) {
1426 ASSERT(both_sides_are_smi);
1427 int max_shift = 0;
1428 int max_result = int_value == 0 ? 1 : int_value;
1429 while (Smi::IsValid(max_result << 1)) {
1430 max_shift++;
1431 max_result <<= 1;
1432 }
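        // For illustration, assuming 31-bit smis (Smi::kMaxValue == 2^30 - 1):
        // with int_value == 3 the loop stops once 3 << 29 would leave the smi
        // range, so max_shift ends up as 28 and any shift amount >= 28 is
        // handled by the deferred code via the check below.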
1433 DeferredCode* deferred =
1434 new DeferredInlineSmiOperation(op, int_value, true, mode, tos);
1435 // Mask off the last 5 bits of the shift operand (rhs). This is part
1436 // of the definition of shift in JS and we know we have a Smi so we
1437 // can safely do this. The masked version gets passed to the
1438 // deferred code, but that makes no difference.
1439 __ and_(tos, tos, Operand(Smi::FromInt(0x1f)));
1440 __ cmp(tos, Operand(Smi::FromInt(max_shift)));
1441 deferred->Branch(ge);
1442 Register scratch = VirtualFrame::scratch0();
1443 __ mov(scratch, Operand(tos, ASR, kSmiTagSize)); // Untag.
1444 __ mov(tos, Operand(Smi::FromInt(int_value))); // Load constant.
1445 __ mov(tos, Operand(tos, LSL, scratch)); // Shift constant.
1446 deferred->BindExit();
1447 TypeInfo result = TypeInfo::Integer32();
1448 frame_->EmitPush(tos, result);
1449 break;
1450 }
1451 // Fall through!
Steve Blocka7e24c12009-10-30 11:49:00 +00001452 case Token::SHR:
1453 case Token::SAR: {
Steve Block6ded16b2010-05-10 14:33:55 +01001454 ASSERT(!reversed);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001455 int shift_value = int_value & 0x1f;
Steve Block791712a2010-08-27 10:21:07 +01001456 TypeInfo result = TypeInfo::Number();
1457
1458 if (op == Token::SHR) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001459 if (shift_value > 1) {
Steve Block791712a2010-08-27 10:21:07 +01001460 result = TypeInfo::Smi();
Iain Merrick9ac36c92010-09-13 15:29:50 +01001461 } else if (shift_value > 0) {
Steve Block791712a2010-08-27 10:21:07 +01001462 result = TypeInfo::Integer32();
Iain Merrick75681382010-08-19 15:07:18 +01001463 }
Steve Block791712a2010-08-27 10:21:07 +01001464 } else if (op == Token::SAR) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001465 if (shift_value > 0) {
Steve Block791712a2010-08-27 10:21:07 +01001466 result = TypeInfo::Smi();
1467 } else {
1468 result = TypeInfo::Integer32();
1469 }
1470 } else {
1471 ASSERT(op == Token::SHL);
1472 result = TypeInfo::Integer32();
Iain Merrick75681382010-08-19 15:07:18 +01001473 }
Steve Block791712a2010-08-27 10:21:07 +01001474
Iain Merrick9ac36c92010-09-13 15:29:50 +01001475 DeferredInlineSmiOperation* deferred =
Steve Block6ded16b2010-05-10 14:33:55 +01001476 new DeferredInlineSmiOperation(op, shift_value, false, mode, tos);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001477 if (!both_sides_are_smi) {
1478 __ tst(tos, Operand(kSmiTagMask));
1479 deferred->JumpToNonSmiInput(ne);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001480 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001481 switch (op) {
1482 case Token::SHL: {
1483 if (shift_value != 0) {
John Reck59135872010-11-02 12:39:01 -07001484 Register untagged_result = VirtualFrame::scratch0();
1485 Register scratch = VirtualFrame::scratch1();
Kristian Monsen25f61362010-05-21 11:50:48 +01001486 int adjusted_shift = shift_value - kSmiTagSize;
1487 ASSERT(adjusted_shift >= 0);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001488
Kristian Monsen25f61362010-05-21 11:50:48 +01001489 if (adjusted_shift != 0) {
John Reck59135872010-11-02 12:39:01 -07001490 __ mov(untagged_result, Operand(tos, LSL, adjusted_shift));
1491 } else {
1492 __ mov(untagged_result, Operand(tos));
Kristian Monsen25f61362010-05-21 11:50:48 +01001493 }
Iain Merrick9ac36c92010-09-13 15:29:50 +01001494 // Check that the *signed* result fits in a smi.
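          // (Adding 0x40000000 maps the valid payload range [-2^30, 2^30)
          // onto the non-negative range [0, 2^31), so any out-of-range
          // result makes the sum negative and takes the 'mi' branch.)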
John Reck59135872010-11-02 12:39:01 -07001495 __ add(scratch, untagged_result, Operand(0x40000000), SetCC);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001496 deferred->JumpToAnswerOutOfRange(mi);
John Reck59135872010-11-02 12:39:01 -07001497 __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001498 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001499 break;
1500 }
1501 case Token::SHR: {
Steve Blocka7e24c12009-10-30 11:49:00 +00001502 if (shift_value != 0) {
John Reck59135872010-11-02 12:39:01 -07001503 Register untagged_result = VirtualFrame::scratch0();
1504 // Remove tag.
1505 __ mov(untagged_result, Operand(tos, ASR, kSmiTagSize));
1506 __ mov(untagged_result, Operand(untagged_result, LSR, shift_value));
Kristian Monsen25f61362010-05-21 11:50:48 +01001507 if (shift_value == 1) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001508 // Check that the *unsigned* result fits in a smi.
1509 // Neither of the two high-order bits can be set:
Kristian Monsen25f61362010-05-21 11:50:48 +01001510 // - 0x80000000: high bit would be lost when smi tagging
Iain Merrick9ac36c92010-09-13 15:29:50 +01001511 // - 0x40000000: this number would convert to negative when Smi
1512 // tagging.
1513 // These two cases can only happen with shifts by 0 or 1 when
1514 // handed a valid smi.
John Reck59135872010-11-02 12:39:01 -07001515 __ tst(untagged_result, Operand(0xc0000000));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001516 deferred->JumpToAnswerOutOfRange(ne);
Kristian Monsen25f61362010-05-21 11:50:48 +01001517 }
John Reck59135872010-11-02 12:39:01 -07001518 __ mov(tos, Operand(untagged_result, LSL, kSmiTagSize));
Iain Merrick9ac36c92010-09-13 15:29:50 +01001519 } else {
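          // A shift by zero leaves the value unchanged, but SHR is an
          // unsigned shift, so a negative smi would become a large unsigned
          // number outside the smi range; negative inputs are sent to the
          // out-of-range handler.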
1520 __ cmp(tos, Operand(0, RelocInfo::NONE));
1521 deferred->JumpToAnswerOutOfRange(mi);
Steve Blocka7e24c12009-10-30 11:49:00 +00001522 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001523 break;
1524 }
1525 case Token::SAR: {
1526 if (shift_value != 0) {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001527 // Do the shift and the tag removal in one operation. If the shift
Kristian Monsen25f61362010-05-21 11:50:48 +01001528 // is 31 bits (the highest possible value) then we emit the
Iain Merrick9ac36c92010-09-13 15:29:50 +01001529 // instruction as a shift by 0 which in the ARM ISA means shift
1530 // arithmetically by 32.
Kristian Monsen25f61362010-05-21 11:50:48 +01001531 __ mov(tos, Operand(tos, ASR, (kSmiTagSize + shift_value) & 0x1f));
Kristian Monsen25f61362010-05-21 11:50:48 +01001532 __ mov(tos, Operand(tos, LSL, kSmiTagSize));
Steve Blocka7e24c12009-10-30 11:49:00 +00001533 }
1534 break;
1535 }
1536 default: UNREACHABLE();
1537 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001538 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001539 frame_->EmitPush(tos, result);
Steve Blocka7e24c12009-10-30 11:49:00 +00001540 break;
1541 }
1542
1543 case Token::MOD: {
Steve Block6ded16b2010-05-10 14:33:55 +01001544 ASSERT(!reversed);
1545 ASSERT(int_value >= 2);
1546 ASSERT(IsPowerOf2(int_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001547 DeferredCode* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001548 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001549 unsigned mask = (0x80000000u | kSmiTagMask);
Steve Block6ded16b2010-05-10 14:33:55 +01001550 __ tst(tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001551      deferred->Branch(ne); // Go to deferred code on non-Smis and negative numbers.
1552 mask = (int_value << kSmiTagSize) - 1;
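      // Illustration: for int_value == 8 the mask is (8 << 1) - 1 == 15;
      // since a tagged smi is 2 * payload, tos & 15 is exactly the tagged
      // representation of payload % 8.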
Steve Block6ded16b2010-05-10 14:33:55 +01001553 __ and_(tos, tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001554 deferred->BindExit();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001555      // Mod by a positive power-of-2 Smi gives a Smi if the lhs is an integer.
1556 frame_->EmitPush(
1557 tos,
1558 both_sides_are_smi ? TypeInfo::Smi() : TypeInfo::Number());
Steve Blocka7e24c12009-10-30 11:49:00 +00001559 break;
1560 }
1561
1562 case Token::MUL: {
Steve Block6ded16b2010-05-10 14:33:55 +01001563 ASSERT(IsEasyToMultiplyBy(int_value));
Steve Blocka7e24c12009-10-30 11:49:00 +00001564 DeferredCode* deferred =
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001565 new DeferredInlineSmiOperation(op, int_value, reversed, mode, tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001566 unsigned max_smi_that_wont_overflow = Smi::kMaxValue / int_value;
1567 max_smi_that_wont_overflow <<= kSmiTagSize;
1568 unsigned mask = 0x80000000u;
1569 while ((mask & max_smi_that_wont_overflow) == 0) {
1570 mask |= mask >> 1;
1571 }
1572 mask |= kSmiTagMask;
1573      // A single mask check conservatively rejects values that are too
1574      // large, as well as non-Smis. It unfortunately also filters out
1575      // negative numbers, but since this code is inline we prefer
1576      // brevity to comprehensiveness.
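      // Illustration, assuming 31-bit smis: for int_value == 10 the largest
      // tagged smi that cannot overflow is 0x0ccccccc, so the loop above
      // widens the mask to 0xf8000000 and the final value is 0xf8000001.
      // Values that pass the tst are non-negative smis below 2^26,
      // comfortably inside the exact limit.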
Steve Block6ded16b2010-05-10 14:33:55 +01001577 __ tst(tos, Operand(mask));
Steve Blocka7e24c12009-10-30 11:49:00 +00001578 deferred->Branch(ne);
Kristian Monsen80d68ea2010-09-08 11:05:35 +01001579 InlineMultiplyByKnownInt(masm_, tos, tos, int_value);
Steve Blocka7e24c12009-10-30 11:49:00 +00001580 deferred->BindExit();
Steve Block6ded16b2010-05-10 14:33:55 +01001581 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00001582 break;
1583 }
1584
1585 default:
Steve Block6ded16b2010-05-10 14:33:55 +01001586 UNREACHABLE();
Steve Blocka7e24c12009-10-30 11:49:00 +00001587 break;
1588 }
Steve Blocka7e24c12009-10-30 11:49:00 +00001589}
1590
1591
Steve Block1e0659c2011-05-24 12:43:12 +01001592void CodeGenerator::Comparison(Condition cond,
Steve Blocka7e24c12009-10-30 11:49:00 +00001593 Expression* left,
1594 Expression* right,
1595 bool strict) {
Steve Block6ded16b2010-05-10 14:33:55 +01001596 VirtualFrame::RegisterAllocationScope scope(this);
Steve Blocka7e24c12009-10-30 11:49:00 +00001597
Steve Block6ded16b2010-05-10 14:33:55 +01001598 if (left != NULL) Load(left);
1599 if (right != NULL) Load(right);
1600
Steve Blocka7e24c12009-10-30 11:49:00 +00001601 // sp[0] : y
1602 // sp[1] : x
1603 // result : cc register
1604
1605 // Strict only makes sense for equality comparisons.
Steve Block1e0659c2011-05-24 12:43:12 +01001606 ASSERT(!strict || cond == eq);
Steve Blocka7e24c12009-10-30 11:49:00 +00001607
Steve Block6ded16b2010-05-10 14:33:55 +01001608 Register lhs;
1609 Register rhs;
1610
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001611 bool lhs_is_smi;
1612 bool rhs_is_smi;
1613
Steve Block6ded16b2010-05-10 14:33:55 +01001614 // We load the top two stack positions into registers chosen by the virtual
1615 // frame. This should keep the register shuffling to a minimum.
Steve Blocka7e24c12009-10-30 11:49:00 +00001616 // Implement '>' and '<=' by reversal to obtain ECMA-262 conversion order.
Steve Block1e0659c2011-05-24 12:43:12 +01001617 if (cond == gt || cond == le) {
1618 cond = ReverseCondition(cond);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001619 lhs_is_smi = frame_->KnownSmiAt(0);
1620 rhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001621 lhs = frame_->PopToRegister();
1622 rhs = frame_->PopToRegister(lhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001623 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001624 rhs_is_smi = frame_->KnownSmiAt(0);
1625 lhs_is_smi = frame_->KnownSmiAt(1);
Steve Block6ded16b2010-05-10 14:33:55 +01001626 rhs = frame_->PopToRegister();
1627 lhs = frame_->PopToRegister(rhs); // Don't pop to the same register again!
Steve Blocka7e24c12009-10-30 11:49:00 +00001628 }
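  // (Reversal example: 'a > b' is compiled as 'b < a', which is why the
  // two branches above pop the operands into lhs and rhs in opposite
  // orders.)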
Steve Block6ded16b2010-05-10 14:33:55 +01001629
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001630 bool both_sides_are_smi = (lhs_is_smi && rhs_is_smi);
1631
Steve Block6ded16b2010-05-10 14:33:55 +01001632 ASSERT(rhs.is(r0) || rhs.is(r1));
1633 ASSERT(lhs.is(r0) || lhs.is(r1));
1634
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001635 JumpTarget exit;
Steve Blocka7e24c12009-10-30 11:49:00 +00001636
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001637 if (!both_sides_are_smi) {
1638 // Now we have the two sides in r0 and r1. We flush any other registers
1639 // because the stub doesn't know about register allocation.
1640 frame_->SpillAll();
1641 Register scratch = VirtualFrame::scratch0();
1642 Register smi_test_reg;
1643 if (lhs_is_smi) {
1644 smi_test_reg = rhs;
1645 } else if (rhs_is_smi) {
1646 smi_test_reg = lhs;
1647 } else {
1648 __ orr(scratch, lhs, Operand(rhs));
1649 smi_test_reg = scratch;
1650 }
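    // When neither side is statically known to be a smi, the or above
    // combines the tag bits: the smi tag is zero, so the combined value
    // has a clear tag bit exactly when both operands are smis, and a
    // single tst checks both at once.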
1651 __ tst(smi_test_reg, Operand(kSmiTagMask));
1652 JumpTarget smi;
1653 smi.Branch(eq);
1654
1655 // Perform non-smi comparison by stub.
1656 // CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
1657 // We call with 0 args because there are 0 on the stack.
Steve Block1e0659c2011-05-24 12:43:12 +01001658 CompareStub stub(cond, strict, NO_SMI_COMPARE_IN_STUB, lhs, rhs);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001659 frame_->CallStub(&stub, 0);
Iain Merrick9ac36c92010-09-13 15:29:50 +01001660 __ cmp(r0, Operand(0, RelocInfo::NONE));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001661 exit.Jump();
1662
1663 smi.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001664 }
1665
Steve Blocka7e24c12009-10-30 11:49:00 +00001666 // Do smi comparisons by pointer comparison.
Steve Block6ded16b2010-05-10 14:33:55 +01001667 __ cmp(lhs, Operand(rhs));
Steve Blocka7e24c12009-10-30 11:49:00 +00001668
1669 exit.Bind();
Steve Block1e0659c2011-05-24 12:43:12 +01001670 cc_reg_ = cond;
Steve Blocka7e24c12009-10-30 11:49:00 +00001671}
1672
1673
Steve Blocka7e24c12009-10-30 11:49:00 +00001674// Call the function on the stack with the given arguments.
1675void CodeGenerator::CallWithArguments(ZoneList<Expression*>* args,
Leon Clarkee46be812010-01-19 14:06:41 +00001676 CallFunctionFlags flags,
1677 int position) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001678 // Push the arguments ("left-to-right") on the stack.
1679 int arg_count = args->length();
1680 for (int i = 0; i < arg_count; i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001681 Load(args->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001682 }
1683
1684 // Record the position for debugging purposes.
1685 CodeForSourcePosition(position);
1686
1687 // Use the shared code stub to call the function.
1688 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
Leon Clarkee46be812010-01-19 14:06:41 +00001689 CallFunctionStub call_function(arg_count, in_loop, flags);
Steve Blocka7e24c12009-10-30 11:49:00 +00001690 frame_->CallStub(&call_function, arg_count + 1);
1691
1692 // Restore context and pop function from the stack.
1693 __ ldr(cp, frame_->Context());
1694 frame_->Drop(); // discard the TOS
1695}
1696
1697
Steve Block6ded16b2010-05-10 14:33:55 +01001698void CodeGenerator::CallApplyLazy(Expression* applicand,
1699 Expression* receiver,
1700 VariableProxy* arguments,
1701 int position) {
1702 // An optimized implementation of expressions of the form
1703 // x.apply(y, arguments).
1704 // If the arguments object of the scope has not been allocated,
1705 // and x.apply is Function.prototype.apply, this optimization
1706 // just copies y and the arguments of the current function on the
1707 // stack, as receiver and arguments, and calls x.
1708 // In the implementation comments, we call x the applicand
1709 // and y the receiver.
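  // For illustration, the JavaScript pattern this targets is e.g.
  //   function delegate() { return f.apply(this, arguments); }
  // (the identifier names here are arbitrary).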
Steve Block6ded16b2010-05-10 14:33:55 +01001710
1711 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
1712 ASSERT(arguments->IsArguments());
1713
1714 // Load applicand.apply onto the stack. This will usually
1715 // give us a megamorphic load site. Not super, but it works.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001716 Load(applicand);
Steve Block6ded16b2010-05-10 14:33:55 +01001717 Handle<String> name = Factory::LookupAsciiSymbol("apply");
Leon Clarkef7060e22010-06-03 12:02:55 +01001718 frame_->Dup();
Steve Block6ded16b2010-05-10 14:33:55 +01001719 frame_->CallLoadIC(name, RelocInfo::CODE_TARGET);
1720 frame_->EmitPush(r0);
1721
1722 // Load the receiver and the existing arguments object onto the
1723 // expression stack. Avoid allocating the arguments object here.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001724 Load(receiver);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001725 LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
Steve Block6ded16b2010-05-10 14:33:55 +01001726
Steve Block8defd9f2010-07-08 12:39:36 +01001727 // At this point the top two stack elements are probably in registers
1728  // since they were just loaded. Ensure they are in registers and
1729  // fetch those registers.
1730 Register receiver_reg = frame_->Peek2();
1731 Register arguments_reg = frame_->Peek();
1732
1733 // From now on the frame is spilled.
1734 frame_->SpillAll();
1735
Steve Block6ded16b2010-05-10 14:33:55 +01001736 // Emit the source position information after having loaded the
1737 // receiver and the arguments.
1738 CodeForSourcePosition(position);
1739 // Contents of the stack at this point:
1740 // sp[0]: arguments object of the current function or the hole.
1741 // sp[1]: receiver
1742 // sp[2]: applicand.apply
1743 // sp[3]: applicand.
1744
1745 // Check if the arguments object has been lazily allocated
1746 // already. If so, just use that instead of copying the arguments
1747 // from the stack. This also deals with cases where a local variable
1748 // named 'arguments' has been introduced.
Steve Block8defd9f2010-07-08 12:39:36 +01001749 JumpTarget slow;
1750 Label done;
Ben Murdoch086aeea2011-05-13 15:57:08 +01001751 __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01001752 __ cmp(ip, arguments_reg);
1753 slow.Branch(ne);
Steve Block6ded16b2010-05-10 14:33:55 +01001754
1755 Label build_args;
1756 // Get rid of the arguments object probe.
1757 frame_->Drop();
1758 // Stack now has 3 elements on it.
1759 // Contents of stack at this point:
Steve Block8defd9f2010-07-08 12:39:36 +01001760 // sp[0]: receiver - in the receiver_reg register.
Steve Block6ded16b2010-05-10 14:33:55 +01001761 // sp[1]: applicand.apply
1762 // sp[2]: applicand.
1763
1764 // Check that the receiver really is a JavaScript object.
Steve Block1e0659c2011-05-24 12:43:12 +01001765 __ JumpIfSmi(receiver_reg, &build_args);
Steve Block6ded16b2010-05-10 14:33:55 +01001766 // We allow all JSObjects including JSFunctions. As long as
1767 // JS_FUNCTION_TYPE is the last instance type and it is right
1768 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
1769 // bound.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01001770 STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1771 STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
Steve Block8defd9f2010-07-08 12:39:36 +01001772 __ CompareObjectType(receiver_reg, r2, r3, FIRST_JS_OBJECT_TYPE);
Steve Block6ded16b2010-05-10 14:33:55 +01001773 __ b(lt, &build_args);
1774
1775 // Check that applicand.apply is Function.prototype.apply.
1776 __ ldr(r0, MemOperand(sp, kPointerSize));
Steve Block1e0659c2011-05-24 12:43:12 +01001777 __ JumpIfSmi(r0, &build_args);
Steve Block6ded16b2010-05-10 14:33:55 +01001778 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
1779 __ b(ne, &build_args);
Steve Block6ded16b2010-05-10 14:33:55 +01001780 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
Steve Block791712a2010-08-27 10:21:07 +01001781 __ ldr(r1, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
1782 __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
Steve Block6ded16b2010-05-10 14:33:55 +01001783 __ cmp(r1, Operand(apply_code));
1784 __ b(ne, &build_args);
1785
1786 // Check that applicand is a function.
1787 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
Steve Block1e0659c2011-05-24 12:43:12 +01001788 __ JumpIfSmi(r1, &build_args);
Steve Block6ded16b2010-05-10 14:33:55 +01001789 __ CompareObjectType(r1, r2, r3, JS_FUNCTION_TYPE);
1790 __ b(ne, &build_args);
1791
1792 // Copy the arguments to this function possibly from the
1793 // adaptor frame below it.
1794 Label invoke, adapted;
1795 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1796 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
1797 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1798 __ b(eq, &adapted);
1799
1800 // No arguments adaptor frame. Copy fixed number of arguments.
1801 __ mov(r0, Operand(scope()->num_parameters()));
1802 for (int i = 0; i < scope()->num_parameters(); i++) {
1803 __ ldr(r2, frame_->ParameterAt(i));
1804 __ push(r2);
1805 }
1806 __ jmp(&invoke);
1807
1808 // Arguments adaptor frame present. Copy arguments from there, but
1809  // cap the number of arguments copied to avoid stack overflows.
1810 __ bind(&adapted);
1811 static const uint32_t kArgumentsLimit = 1 * KB;
1812 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
1813 __ mov(r0, Operand(r0, LSR, kSmiTagSize));
1814 __ mov(r3, r0);
1815 __ cmp(r0, Operand(kArgumentsLimit));
1816 __ b(gt, &build_args);
1817
1818 // Loop through the arguments pushing them onto the execution
1819 // stack. We don't inform the virtual frame of the push, so we don't
1820 // have to worry about getting rid of the elements from the virtual
1821 // frame.
1822 Label loop;
1823 // r3 is a small non-negative integer, due to the test above.
Iain Merrick9ac36c92010-09-13 15:29:50 +01001824 __ cmp(r3, Operand(0, RelocInfo::NONE));
Steve Block6ded16b2010-05-10 14:33:55 +01001825 __ b(eq, &invoke);
1826 // Compute the address of the first argument.
1827 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2));
1828 __ add(r2, r2, Operand(kPointerSize));
1829 __ bind(&loop);
1830 // Post-decrement argument address by kPointerSize on each iteration.
1831 __ ldr(r4, MemOperand(r2, kPointerSize, NegPostIndex));
1832 __ push(r4);
1833 __ sub(r3, r3, Operand(1), SetCC);
1834 __ b(gt, &loop);
1835
1836 // Invoke the function.
1837 __ bind(&invoke);
1838 ParameterCount actual(r0);
1839 __ InvokeFunction(r1, actual, CALL_FUNCTION);
1840 // Drop applicand.apply and applicand from the stack, and push
1841 // the result of the function call, but leave the spilled frame
1842 // unchanged, with 3 elements, so it is correct when we compile the
1843 // slow-case code.
1844 __ add(sp, sp, Operand(2 * kPointerSize));
1845 __ push(r0);
1846 // Stack now has 1 element:
1847 // sp[0]: result
1848 __ jmp(&done);
1849
1850 // Slow-case: Allocate the arguments object since we know it isn't
1851  // there, and fall through to the slow case where we call
1852 // applicand.apply.
1853 __ bind(&build_args);
1854  // Stack now has 3 elements, as at the sites that jump here:
1855 // sp[0]: receiver
1856 // sp[1]: applicand.apply
1857 // sp[2]: applicand.
1858 StoreArgumentsObject(false);
1859
1860 // Stack and frame now have 4 elements.
Steve Block8defd9f2010-07-08 12:39:36 +01001861 slow.Bind();
Steve Block6ded16b2010-05-10 14:33:55 +01001862
1863 // Generic computation of x.apply(y, args) with no special optimization.
1864 // Flip applicand.apply and applicand on the stack, so
1865 // applicand looks like the receiver of the applicand.apply call.
1866 // Then process it as a normal function call.
1867 __ ldr(r0, MemOperand(sp, 3 * kPointerSize));
1868 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
Leon Clarkef7060e22010-06-03 12:02:55 +01001869 __ Strd(r0, r1, MemOperand(sp, 2 * kPointerSize));
Steve Block6ded16b2010-05-10 14:33:55 +01001870
1871 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
1872 frame_->CallStub(&call_function, 3);
1873 // The function and its two arguments have been dropped.
1874 frame_->Drop(); // Drop the receiver as well.
1875 frame_->EmitPush(r0);
Ben Murdochbb769b22010-08-11 14:56:33 +01001876  frame_->SpillAll();  // The other path to the 'done' label also has a spilled frame.
Steve Block6ded16b2010-05-10 14:33:55 +01001877 // Stack now has 1 element:
1878 // sp[0]: result
1879 __ bind(&done);
1880
1881 // Restore the context register after a call.
1882 __ ldr(cp, frame_->Context());
1883}
1884
1885
Steve Blocka7e24c12009-10-30 11:49:00 +00001886void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
Steve Blocka7e24c12009-10-30 11:49:00 +00001887 ASSERT(has_cc());
Steve Block1e0659c2011-05-24 12:43:12 +01001888 Condition cond = if_true ? cc_reg_ : NegateCondition(cc_reg_);
1889 target->Branch(cond);
Steve Blocka7e24c12009-10-30 11:49:00 +00001890 cc_reg_ = al;
1891}
1892
1893
1894void CodeGenerator::CheckStack() {
Steve Block8defd9f2010-07-08 12:39:36 +01001895 frame_->SpillAll();
Steve Blockd0582a62009-12-15 09:54:21 +00001896 Comment cmnt(masm_, "[ check stack");
1897 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
Steve Blockd0582a62009-12-15 09:54:21 +00001898 masm_->cmp(sp, Operand(ip));
1899 StackCheckStub stub;
1900 // Call the stub if lower.
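  // Both instructions below are predicated on 'lo', so when sp is not
  // below the stack limit they are skipped entirely.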
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001901 masm_->mov(ip,
Steve Blockd0582a62009-12-15 09:54:21 +00001902 Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
1903 RelocInfo::CODE_TARGET),
1904 LeaveCC,
1905 lo);
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08001906 masm_->Call(ip, lo);
Steve Blocka7e24c12009-10-30 11:49:00 +00001907}
1908
1909
1910void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
1911#ifdef DEBUG
1912 int original_height = frame_->height();
1913#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001914 for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001915 Visit(statements->at(i));
Steve Blocka7e24c12009-10-30 11:49:00 +00001916 }
1917 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1918}
1919
1920
1921void CodeGenerator::VisitBlock(Block* node) {
1922#ifdef DEBUG
1923 int original_height = frame_->height();
1924#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001925 Comment cmnt(masm_, "[ Block");
1926 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01001927 node->break_target()->SetExpectedHeight();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01001928 VisitStatements(node->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00001929 if (node->break_target()->is_linked()) {
1930 node->break_target()->Bind();
1931 }
1932 node->break_target()->Unuse();
1933 ASSERT(!has_valid_frame() || frame_->height() == original_height);
1934}
1935
1936
1937void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
Steve Block3ce2e202009-11-05 08:53:23 +00001938 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001939 frame_->EmitPush(Operand(pairs));
1940 frame_->EmitPush(Operand(Smi::FromInt(is_eval() ? 1 : 0)));
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001941 frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
Steve Block6ded16b2010-05-10 14:33:55 +01001942
Ben Murdoche0cee9b2011-05-25 10:26:03 +01001943 frame_->CallRuntime(Runtime::kDeclareGlobals, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00001944 // The result is discarded.
1945}
1946
1947
1948void CodeGenerator::VisitDeclaration(Declaration* node) {
1949#ifdef DEBUG
1950 int original_height = frame_->height();
1951#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00001952 Comment cmnt(masm_, "[ Declaration");
1953 Variable* var = node->proxy()->var();
1954 ASSERT(var != NULL); // must have been resolved
Kristian Monsen0d5e1162010-09-30 15:31:59 +01001955 Slot* slot = var->AsSlot();
Steve Blocka7e24c12009-10-30 11:49:00 +00001956
1957 // If it was not possible to allocate the variable at compile time,
1958 // we need to "declare" it at runtime to make sure it actually
1959 // exists in the local context.
1960 if (slot != NULL && slot->type() == Slot::LOOKUP) {
1961 // Variables with a "LOOKUP" slot were introduced as non-locals
1962 // during variable resolution and must have mode DYNAMIC.
1963 ASSERT(var->is_dynamic());
1964 // For now, just do a runtime call.
1965 frame_->EmitPush(cp);
Steve Block6ded16b2010-05-10 14:33:55 +01001966 frame_->EmitPush(Operand(var->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00001967 // Declaration nodes are always declared in only two modes.
1968 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
1969 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
Steve Block6ded16b2010-05-10 14:33:55 +01001970 frame_->EmitPush(Operand(Smi::FromInt(attr)));
Steve Blocka7e24c12009-10-30 11:49:00 +00001971 // Push initial value, if any.
1972 // Note: For variables we must not push an initial value (such as
1973 // 'undefined') because we may have a (legal) redeclaration and we
1974 // must not destroy the current value.
1975 if (node->mode() == Variable::CONST) {
Steve Block6ded16b2010-05-10 14:33:55 +01001976 frame_->EmitPushRoot(Heap::kTheHoleValueRootIndex);
Steve Blocka7e24c12009-10-30 11:49:00 +00001977 } else if (node->fun() != NULL) {
Steve Block6ded16b2010-05-10 14:33:55 +01001978 Load(node->fun());
Steve Blocka7e24c12009-10-30 11:49:00 +00001979 } else {
Iain Merrick9ac36c92010-09-13 15:29:50 +01001980 frame_->EmitPush(Operand(0, RelocInfo::NONE));
Steve Blocka7e24c12009-10-30 11:49:00 +00001981 }
Steve Block6ded16b2010-05-10 14:33:55 +01001982
Steve Blocka7e24c12009-10-30 11:49:00 +00001983 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
1984 // Ignore the return value (declarations are statements).
Steve Block6ded16b2010-05-10 14:33:55 +01001985
Steve Blocka7e24c12009-10-30 11:49:00 +00001986 ASSERT(frame_->height() == original_height);
1987 return;
1988 }
1989
1990 ASSERT(!var->is_global());
1991
1992 // If we have a function or a constant, we need to initialize the variable.
1993 Expression* val = NULL;
1994 if (node->mode() == Variable::CONST) {
1995 val = new Literal(Factory::the_hole_value());
1996 } else {
1997 val = node->fun(); // NULL if we don't have a function
1998 }
1999
Steve Block8defd9f2010-07-08 12:39:36 +01002000
Steve Blocka7e24c12009-10-30 11:49:00 +00002001 if (val != NULL) {
Steve Block8defd9f2010-07-08 12:39:36 +01002002 WriteBarrierCharacter wb_info =
2003 val->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
2004 if (val->AsLiteral() != NULL) wb_info = NEVER_NEWSPACE;
Steve Block6ded16b2010-05-10 14:33:55 +01002005 // Set initial value.
2006 Reference target(this, node->proxy());
2007 Load(val);
Steve Block8defd9f2010-07-08 12:39:36 +01002008 target.SetValue(NOT_CONST_INIT, wb_info);
Steve Block6ded16b2010-05-10 14:33:55 +01002009
Steve Blocka7e24c12009-10-30 11:49:00 +00002010 // Get rid of the assigned value (declarations are statements).
2011 frame_->Drop();
2012 }
2013 ASSERT(frame_->height() == original_height);
2014}
2015
2016
2017void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
2018#ifdef DEBUG
2019 int original_height = frame_->height();
2020#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002021 Comment cmnt(masm_, "[ ExpressionStatement");
2022 CodeForStatementPosition(node);
2023 Expression* expression = node->expression();
2024 expression->MarkAsStatement();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002025 Load(expression);
Steve Blocka7e24c12009-10-30 11:49:00 +00002026 frame_->Drop();
2027 ASSERT(frame_->height() == original_height);
2028}
2029
2030
2031void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
2032#ifdef DEBUG
2033 int original_height = frame_->height();
2034#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002035 Comment cmnt(masm_, "// EmptyStatement");
2036 CodeForStatementPosition(node);
2037 // nothing to do
2038 ASSERT(frame_->height() == original_height);
2039}
2040
2041
2042void CodeGenerator::VisitIfStatement(IfStatement* node) {
2043#ifdef DEBUG
2044 int original_height = frame_->height();
2045#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002046 Comment cmnt(masm_, "[ IfStatement");
2047 // Generate different code depending on which parts of the if statement
2048 // are present or not.
2049 bool has_then_stm = node->HasThenStatement();
2050 bool has_else_stm = node->HasElseStatement();
2051
2052 CodeForStatementPosition(node);
2053
2054 JumpTarget exit;
2055 if (has_then_stm && has_else_stm) {
2056 Comment cmnt(masm_, "[ IfThenElse");
2057 JumpTarget then;
2058 JumpTarget else_;
2059 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002060 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002061 if (frame_ != NULL) {
2062 Branch(false, &else_);
2063 }
2064 // then
2065 if (frame_ != NULL || then.is_linked()) {
2066 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002067 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002068 }
2069 if (frame_ != NULL) {
2070 exit.Jump();
2071 }
2072 // else
2073 if (else_.is_linked()) {
2074 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002075 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002076 }
2077
2078 } else if (has_then_stm) {
2079 Comment cmnt(masm_, "[ IfThen");
2080 ASSERT(!has_else_stm);
2081 JumpTarget then;
2082 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002083 LoadCondition(node->condition(), &then, &exit, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002084 if (frame_ != NULL) {
2085 Branch(false, &exit);
2086 }
2087 // then
2088 if (frame_ != NULL || then.is_linked()) {
2089 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002090 Visit(node->then_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002091 }
2092
2093 } else if (has_else_stm) {
2094 Comment cmnt(masm_, "[ IfElse");
2095 ASSERT(!has_then_stm);
2096 JumpTarget else_;
2097 // if (!cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002098 LoadCondition(node->condition(), &exit, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002099 if (frame_ != NULL) {
2100 Branch(true, &exit);
2101 }
2102 // else
2103 if (frame_ != NULL || else_.is_linked()) {
2104 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002105 Visit(node->else_statement());
Steve Blocka7e24c12009-10-30 11:49:00 +00002106 }
2107
2108 } else {
2109 Comment cmnt(masm_, "[ If");
2110 ASSERT(!has_then_stm && !has_else_stm);
2111 // if (cond)
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002112 LoadCondition(node->condition(), &exit, &exit, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00002113 if (frame_ != NULL) {
2114 if (has_cc()) {
2115 cc_reg_ = al;
2116 } else {
2117 frame_->Drop();
2118 }
2119 }
2120 }
2121
2122 // end
2123 if (exit.is_linked()) {
2124 exit.Bind();
2125 }
2126 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2127}
2128
2129
2130void CodeGenerator::VisitContinueStatement(ContinueStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002131 Comment cmnt(masm_, "[ ContinueStatement");
2132 CodeForStatementPosition(node);
2133 node->target()->continue_target()->Jump();
2134}
2135
2136
2137void CodeGenerator::VisitBreakStatement(BreakStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002138 Comment cmnt(masm_, "[ BreakStatement");
2139 CodeForStatementPosition(node);
2140 node->target()->break_target()->Jump();
2141}
2142
2143
2144void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002145 Comment cmnt(masm_, "[ ReturnStatement");
2146
2147 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002148 Load(node->expression());
Iain Merrick75681382010-08-19 15:07:18 +01002149 frame_->PopToR0();
2150 frame_->PrepareForReturn();
Steve Blocka7e24c12009-10-30 11:49:00 +00002151 if (function_return_is_shadowed_) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002152 function_return_.Jump();
2153 } else {
2154    // The return value is already in r0 and the frame is prepared for
2155    // returning, which makes it easier to merge at the return site.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002156 if (function_return_.is_bound()) {
2157 // If the function return label is already bound we reuse the
2158 // code by jumping to the return site.
2159 function_return_.Jump();
2160 } else {
2161 function_return_.Bind();
2162 GenerateReturnSequence();
2163 }
2164 }
2165}
Steve Blocka7e24c12009-10-30 11:49:00 +00002166
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002167
2168void CodeGenerator::GenerateReturnSequence() {
2169 if (FLAG_trace) {
2170 // Push the return value on the stack as the parameter.
2171 // Runtime::TraceExit returns the parameter as it is.
2172 frame_->EmitPush(r0);
2173 frame_->CallRuntime(Runtime::kTraceExit, 1);
2174 }
2175
2176#ifdef DEBUG
2177 // Add a label for checking the size of the code used for returning.
2178 Label check_exit_codesize;
2179 masm_->bind(&check_exit_codesize);
2180#endif
2181 // Make sure that the constant pool is not emitted inside of the return
2182 // sequence.
2183 { Assembler::BlockConstPoolScope block_const_pool(masm_);
2184 // Tear down the frame which will restore the caller's frame pointer and
2185 // the link register.
2186 frame_->Exit();
2187
2188    // Here we use masm_-> instead of the __ macro to keep the code coverage
2189    // tool from instrumenting, as we rely on the code size here.
2190 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
2191 masm_->add(sp, sp, Operand(sp_delta));
2192 masm_->Jump(lr);
2193 DeleteFrame();
2194
2195#ifdef DEBUG
Steve Block1e0659c2011-05-24 12:43:12 +01002196 // Check that the size of the code used for returning is large enough
2197 // for the debugger's requirements.
2198 ASSERT(Assembler::kJSReturnSequenceInstructions <=
2199 masm_->InstructionsGeneratedSince(&check_exit_codesize));
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01002200#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002201 }
2202}
2203
2204
2205void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* node) {
2206#ifdef DEBUG
2207 int original_height = frame_->height();
2208#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002209 Comment cmnt(masm_, "[ WithEnterStatement");
2210 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002211 Load(node->expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00002212 if (node->is_catch_block()) {
2213 frame_->CallRuntime(Runtime::kPushCatchContext, 1);
2214 } else {
2215 frame_->CallRuntime(Runtime::kPushContext, 1);
2216 }
2217#ifdef DEBUG
2218 JumpTarget verified_true;
Steve Block6ded16b2010-05-10 14:33:55 +01002219 __ cmp(r0, cp);
Steve Blocka7e24c12009-10-30 11:49:00 +00002220 verified_true.Branch(eq);
2221 __ stop("PushContext: r0 is expected to be the same as cp");
2222 verified_true.Bind();
2223#endif
2224 // Update context local.
2225 __ str(cp, frame_->Context());
2226 ASSERT(frame_->height() == original_height);
2227}
2228
2229
2230void CodeGenerator::VisitWithExitStatement(WithExitStatement* node) {
2231#ifdef DEBUG
2232 int original_height = frame_->height();
2233#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002234 Comment cmnt(masm_, "[ WithExitStatement");
2235 CodeForStatementPosition(node);
2236 // Pop context.
2237 __ ldr(cp, ContextOperand(cp, Context::PREVIOUS_INDEX));
2238 // Update context local.
2239 __ str(cp, frame_->Context());
2240 ASSERT(frame_->height() == original_height);
2241}
2242
2243
2244void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
2245#ifdef DEBUG
2246 int original_height = frame_->height();
2247#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002248 Comment cmnt(masm_, "[ SwitchStatement");
2249 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002250 node->break_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002251
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002252 Load(node->tag());
Steve Blocka7e24c12009-10-30 11:49:00 +00002253
2254 JumpTarget next_test;
2255 JumpTarget fall_through;
2256 JumpTarget default_entry;
2257 JumpTarget default_exit(JumpTarget::BIDIRECTIONAL);
2258 ZoneList<CaseClause*>* cases = node->cases();
2259 int length = cases->length();
2260 CaseClause* default_clause = NULL;
2261
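  // Shape of the generated code, for orientation: each non-default clause
  // compiles to a test (dup the switch value, compare, branch to next_test
  // on mismatch) followed by its body; the default clause, if present, is
  // compiled last and entered through default_entry.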
2262 for (int i = 0; i < length; i++) {
2263 CaseClause* clause = cases->at(i);
2264 if (clause->is_default()) {
2265 // Remember the default clause and compile it at the end.
2266 default_clause = clause;
2267 continue;
2268 }
2269
2270 Comment cmnt(masm_, "[ Case clause");
2271 // Compile the test.
2272 next_test.Bind();
2273 next_test.Unuse();
2274 // Duplicate TOS.
Steve Block8defd9f2010-07-08 12:39:36 +01002275 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00002276 Comparison(eq, NULL, clause->label(), true);
2277 Branch(false, &next_test);
2278
2279 // Before entering the body from the test, remove the switch value from
2280 // the stack.
2281 frame_->Drop();
2282
2283 // Label the body so that fall through is enabled.
2284 if (i > 0 && cases->at(i - 1)->is_default()) {
2285 default_exit.Bind();
2286 } else {
2287 fall_through.Bind();
2288 fall_through.Unuse();
2289 }
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002290 VisitStatements(clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002291
2292 // If control flow can fall through from the body, jump to the next body
2293 // or the end of the statement.
2294 if (frame_ != NULL) {
2295 if (i < length - 1 && cases->at(i + 1)->is_default()) {
2296 default_entry.Jump();
2297 } else {
2298 fall_through.Jump();
2299 }
2300 }
2301 }
2302
2303 // The final "test" removes the switch value.
2304 next_test.Bind();
2305 frame_->Drop();
2306
2307 // If there is a default clause, compile it.
2308 if (default_clause != NULL) {
2309 Comment cmnt(masm_, "[ Default clause");
2310 default_entry.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002311 VisitStatements(default_clause->statements());
Steve Blocka7e24c12009-10-30 11:49:00 +00002312 // If control flow can fall out of the default and there is a case after
Steve Block8defd9f2010-07-08 12:39:36 +01002313 // it, jump to that case's body.
Steve Blocka7e24c12009-10-30 11:49:00 +00002314 if (frame_ != NULL && default_exit.is_bound()) {
2315 default_exit.Jump();
2316 }
2317 }
2318
2319 if (fall_through.is_linked()) {
2320 fall_through.Bind();
2321 }
2322
2323 if (node->break_target()->is_linked()) {
2324 node->break_target()->Bind();
2325 }
2326 node->break_target()->Unuse();
2327 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2328}
2329
2330
Steve Block3ce2e202009-11-05 08:53:23 +00002331void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002332#ifdef DEBUG
2333 int original_height = frame_->height();
2334#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002335 Comment cmnt(masm_, "[ DoWhileStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002336 CodeForStatementPosition(node);
Kristian Monsen25f61362010-05-21 11:50:48 +01002337 node->break_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002338 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Steve Block6ded16b2010-05-10 14:33:55 +01002339 IncrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002340
Steve Block3ce2e202009-11-05 08:53:23 +00002341 // Label the top of the loop for the backward CFG edge. If the test
2342 // is always true we can use the continue target, and if the test is
2343 // always false there is no need.
2344 ConditionAnalysis info = AnalyzeCondition(node->cond());
2345 switch (info) {
2346 case ALWAYS_TRUE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002347 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002348 node->continue_target()->Bind();
Steve Block3ce2e202009-11-05 08:53:23 +00002349 break;
2350 case ALWAYS_FALSE:
Kristian Monsen25f61362010-05-21 11:50:48 +01002351 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002352 break;
2353 case DONT_KNOW:
Kristian Monsen25f61362010-05-21 11:50:48 +01002354 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002355 body.Bind();
2356 break;
2357 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002358
Steve Block3ce2e202009-11-05 08:53:23 +00002359 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002360 Visit(node->body());
Steve Blocka7e24c12009-10-30 11:49:00 +00002361
Steve Blockd0582a62009-12-15 09:54:21 +00002362 // Compile the test.
Steve Block3ce2e202009-11-05 08:53:23 +00002363 switch (info) {
2364 case ALWAYS_TRUE:
2365 // If control can fall off the end of the body, jump back to the
2366 // top.
Steve Blocka7e24c12009-10-30 11:49:00 +00002367 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002368 node->continue_target()->Jump();
Steve Blocka7e24c12009-10-30 11:49:00 +00002369 }
2370 break;
Steve Block3ce2e202009-11-05 08:53:23 +00002371 case ALWAYS_FALSE:
2372 // If we have a continue in the body, we only have to bind its
2373 // jump target.
2374 if (node->continue_target()->is_linked()) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002375 node->continue_target()->Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002376 }
Steve Block3ce2e202009-11-05 08:53:23 +00002377 break;
2378 case DONT_KNOW:
2379 // We have to compile the test expression if it can be reached by
2380 // control flow falling out of the body or via continue.
2381 if (node->continue_target()->is_linked()) {
2382 node->continue_target()->Bind();
2383 }
2384 if (has_valid_frame()) {
Steve Blockd0582a62009-12-15 09:54:21 +00002385 Comment cmnt(masm_, "[ DoWhileCondition");
2386 CodeForDoWhileConditionPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002387 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Blocka7e24c12009-10-30 11:49:00 +00002388 if (has_valid_frame()) {
Steve Block3ce2e202009-11-05 08:53:23 +00002389        // An invalid frame here indicates that control did not
2390 // fall out of the test expression.
2391 Branch(true, &body);
Steve Blocka7e24c12009-10-30 11:49:00 +00002392 }
2393 }
2394 break;
Steve Blocka7e24c12009-10-30 11:49:00 +00002395 }
2396
2397 if (node->break_target()->is_linked()) {
2398 node->break_target()->Bind();
2399 }
Steve Block6ded16b2010-05-10 14:33:55 +01002400 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002401 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2402}
2403
2404
2405void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
2406#ifdef DEBUG
2407 int original_height = frame_->height();
2408#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002409 Comment cmnt(masm_, "[ WhileStatement");
2410 CodeForStatementPosition(node);
2411
2412 // If the test is never true and has no side effects there is no need
2413 // to compile the test or body.
2414 ConditionAnalysis info = AnalyzeCondition(node->cond());
2415 if (info == ALWAYS_FALSE) return;
2416
Kristian Monsen25f61362010-05-21 11:50:48 +01002417 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002418 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002419
2420 // Label the top of the loop with the continue target for the backward
2421 // CFG edge.
Kristian Monsen25f61362010-05-21 11:50:48 +01002422 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002423 node->continue_target()->Bind();
2424
2425 if (info == DONT_KNOW) {
Steve Block8defd9f2010-07-08 12:39:36 +01002426 JumpTarget body(JumpTarget::BIDIRECTIONAL);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002427 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002428 if (has_valid_frame()) {
2429 // A NULL frame indicates that control did not fall out of the
2430 // test expression.
2431 Branch(false, node->break_target());
2432 }
2433 if (has_valid_frame() || body.is_linked()) {
2434 body.Bind();
2435 }
2436 }
2437
2438 if (has_valid_frame()) {
2439 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002440 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002441
2442 // If control flow can fall out of the body, jump back to the top.
2443 if (has_valid_frame()) {
2444 node->continue_target()->Jump();
2445 }
2446 }
2447 if (node->break_target()->is_linked()) {
2448 node->break_target()->Bind();
2449 }
Steve Block6ded16b2010-05-10 14:33:55 +01002450 DecrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002451 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2452}
2453
2454
2455void CodeGenerator::VisitForStatement(ForStatement* node) {
2456#ifdef DEBUG
2457 int original_height = frame_->height();
2458#endif
Steve Block3ce2e202009-11-05 08:53:23 +00002459 Comment cmnt(masm_, "[ ForStatement");
2460 CodeForStatementPosition(node);
2461 if (node->init() != NULL) {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002462 Visit(node->init());
Steve Block3ce2e202009-11-05 08:53:23 +00002463 }
2464
2465 // If the test is never true there is no need to compile the test or
2466 // body.
2467 ConditionAnalysis info = AnalyzeCondition(node->cond());
2468 if (info == ALWAYS_FALSE) return;
2469
Kristian Monsen25f61362010-05-21 11:50:48 +01002470 node->break_target()->SetExpectedHeight();
Steve Block6ded16b2010-05-10 14:33:55 +01002471 IncrementLoopNesting();
Steve Block3ce2e202009-11-05 08:53:23 +00002472
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002473 // We know that the loop index is a smi if it is not modified in the
2474 // loop body and it is checked against a constant limit in the loop
2475 // condition. In this case, we reset the static type information of the
2476 // loop index to smi before compiling the body, the update expression, and
2477 // the bottom check of the loop condition.
2478 TypeInfoCodeGenState type_info_scope(this,
2479 node->is_fast_smi_loop() ?
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002480 node->loop_variable()->AsSlot() :
2481 NULL,
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002482 TypeInfo::Smi());
2483
Steve Block3ce2e202009-11-05 08:53:23 +00002484 // If there is no update statement, label the top of the loop with the
2485 // continue target, otherwise with the loop target.
2486 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2487 if (node->next() == NULL) {
Kristian Monsen25f61362010-05-21 11:50:48 +01002488 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002489 node->continue_target()->Bind();
2490 } else {
Kristian Monsen25f61362010-05-21 11:50:48 +01002491 node->continue_target()->SetExpectedHeight();
Steve Block3ce2e202009-11-05 08:53:23 +00002492 loop.Bind();
2493 }
2494
2495 // If the test is always true, there is no need to compile it.
2496 if (info == DONT_KNOW) {
2497 JumpTarget body;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002498 LoadCondition(node->cond(), &body, node->break_target(), true);
Steve Block3ce2e202009-11-05 08:53:23 +00002499 if (has_valid_frame()) {
2500 Branch(false, node->break_target());
2501 }
2502 if (has_valid_frame() || body.is_linked()) {
2503 body.Bind();
2504 }
2505 }
2506
2507 if (has_valid_frame()) {
2508 CheckStack(); // TODO(1222600): ignore if body contains calls.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002509 Visit(node->body());
Steve Block3ce2e202009-11-05 08:53:23 +00002510
2511 if (node->next() == NULL) {
2512 // If there is no update statement and control flow can fall out
2513 // of the loop, jump directly to the continue label.
2514 if (has_valid_frame()) {
2515 node->continue_target()->Jump();
2516 }
2517 } else {
2518 // If there is an update statement and control flow can reach it
2519 // via falling out of the body of the loop or continuing, we
2520 // compile the update statement.
2521 if (node->continue_target()->is_linked()) {
2522 node->continue_target()->Bind();
2523 }
2524 if (has_valid_frame()) {
2525        // Record the source position of the statement, since this code,
2526        // which comes after the code for the body, actually belongs to
2527        // the loop statement and not the body.
2528 CodeForStatementPosition(node);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002529 Visit(node->next());
Steve Block3ce2e202009-11-05 08:53:23 +00002530 loop.Jump();
2531 }
2532 }
2533 }
2534 if (node->break_target()->is_linked()) {
2535 node->break_target()->Bind();
2536 }
Steve Block6ded16b2010-05-10 14:33:55 +01002537 DecrementLoopNesting();
Steve Blocka7e24c12009-10-30 11:49:00 +00002538 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2539}
2540
2541
2542void CodeGenerator::VisitForInStatement(ForInStatement* node) {
2543#ifdef DEBUG
2544 int original_height = frame_->height();
2545#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00002546 Comment cmnt(masm_, "[ ForInStatement");
2547 CodeForStatementPosition(node);
2548
2549 JumpTarget primitive;
2550 JumpTarget jsobject;
2551 JumpTarget fixed_array;
2552 JumpTarget entry(JumpTarget::BIDIRECTIONAL);
2553 JumpTarget end_del_check;
2554 JumpTarget exit;
2555
2556 // Get the object to enumerate over (converted to JSObject).
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01002557 Load(node->enumerable());
Steve Blocka7e24c12009-10-30 11:49:00 +00002558
Iain Merrick75681382010-08-19 15:07:18 +01002559 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Blocka7e24c12009-10-30 11:49:00 +00002560 // Both SpiderMonkey and kjs ignore null and undefined in contrast
2561 // to the specification. 12.6.4 mandates a call to ToObject.
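  // In practice this means 'for (var p in null)' and
  // 'for (var p in undefined)' execute zero iterations instead of
  // failing in ToObject, matching those engines.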
2562 frame_->EmitPop(r0);
2563 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
2564 __ cmp(r0, ip);
2565 exit.Branch(eq);
2566 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2567 __ cmp(r0, ip);
2568 exit.Branch(eq);
2569
2570 // Stack layout in body:
2571 // [iteration counter (Smi)]
2572 // [length of array]
2573 // [FixedArray]
2574 // [Map or 0]
2575 // [Object]
2576
2577 // Check if enumerable is already a JSObject
2578 __ tst(r0, Operand(kSmiTagMask));
2579 primitive.Branch(eq);
2580 __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
2581 jsobject.Branch(hs);
2582
2583 primitive.Bind();
2584 frame_->EmitPush(r0);
Steve Blockd0582a62009-12-15 09:54:21 +00002585 frame_->InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS, 1);
Steve Blocka7e24c12009-10-30 11:49:00 +00002586
2587 jsobject.Bind();
2588 // Get the set of properties (as a FixedArray or Map).
Steve Blockd0582a62009-12-15 09:54:21 +00002589 // r0: value to be iterated over
2590 frame_->EmitPush(r0); // Push the object being iterated over.
2591
2592 // Check cache validity in generated code. This is a fast case for
2593 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
2594 // guarantee cache validity, call the runtime system to check cache
2595 // validity or get the property names in a fixed array.
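  // As an illustration, for 'for (var k in {a: 1, b: 2})' the key order
  // can come straight from the map's enum cache, but only while no
  // object on the prototype chain has elements or a conflicting cache
  // of its own; the checks below establish exactly that.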
2596 JumpTarget call_runtime;
2597 JumpTarget loop(JumpTarget::BIDIRECTIONAL);
2598 JumpTarget check_prototype;
2599 JumpTarget use_cache;
2600 __ mov(r1, Operand(r0));
2601 loop.Bind();
2602 // Check that there are no elements.
2603 __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
2604 __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2605 __ cmp(r2, r4);
2606 call_runtime.Branch(ne);
2607 // Check that instance descriptors are not empty so that we can
2608 // check for an enum cache. Leave the map in r3 for the subsequent
2609 // prototype load.
2610 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
2611 __ ldr(r2, FieldMemOperand(r3, Map::kInstanceDescriptorsOffset));
2612 __ LoadRoot(ip, Heap::kEmptyDescriptorArrayRootIndex);
2613 __ cmp(r2, ip);
2614 call_runtime.Branch(eq);
 2615 // Check that there is an enum cache in the non-empty instance
2616 // descriptors. This is the case if the next enumeration index
2617 // field does not contain a smi.
2618 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumerationIndexOffset));
2619 __ tst(r2, Operand(kSmiTagMask));
2620 call_runtime.Branch(eq);
2621 // For all objects but the receiver, check that the cache is empty.
2622 // r4: empty fixed array root.
2623 __ cmp(r1, r0);
2624 check_prototype.Branch(eq);
2625 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
2626 __ cmp(r2, r4);
2627 call_runtime.Branch(ne);
2628 check_prototype.Bind();
2629 // Load the prototype from the map and loop if non-null.
2630 __ ldr(r1, FieldMemOperand(r3, Map::kPrototypeOffset));
2631 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2632 __ cmp(r1, ip);
2633 loop.Branch(ne);
2634 // The enum cache is valid. Load the map of the object being
2635 // iterated over and use the cache for the iteration.
2636 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2637 use_cache.Jump();
2638
2639 call_runtime.Bind();
2640 // Call the runtime to get the property names for the object.
2641 frame_->EmitPush(r0); // push the object (slot 4) for the runtime call
Steve Blocka7e24c12009-10-30 11:49:00 +00002642 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
2643
Steve Blockd0582a62009-12-15 09:54:21 +00002644 // If we got a map from the runtime call, we can do a fast
2645 // modification check. Otherwise, we got a fixed array, and we have
2646 // to do a slow check.
2647 // r0: map or fixed array (result from call to
2648 // Runtime::kGetPropertyNamesFast)
Steve Blocka7e24c12009-10-30 11:49:00 +00002649 __ mov(r2, Operand(r0));
2650 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
2651 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
2652 __ cmp(r1, ip);
2653 fixed_array.Branch(ne);
2654
Steve Blockd0582a62009-12-15 09:54:21 +00002655 use_cache.Bind();
Steve Blocka7e24c12009-10-30 11:49:00 +00002656 // Get enum cache
Steve Blockd0582a62009-12-15 09:54:21 +00002657 // r0: map (either the result from a call to
2658 // Runtime::kGetPropertyNamesFast or has been fetched directly from
2659 // the object)
Steve Blocka7e24c12009-10-30 11:49:00 +00002660 __ mov(r1, Operand(r0));
2661 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset));
2662 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
2663 __ ldr(r2,
2664 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
2665
2666 frame_->EmitPush(r0); // map
2667 frame_->EmitPush(r2); // enum cache bridge cache
2668 __ ldr(r0, FieldMemOperand(r2, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002669 frame_->EmitPush(r0);
2670 __ mov(r0, Operand(Smi::FromInt(0)));
2671 frame_->EmitPush(r0);
2672 entry.Jump();
2673
2674 fixed_array.Bind();
2675 __ mov(r1, Operand(Smi::FromInt(0)));
2676 frame_->EmitPush(r1); // insert 0 in place of Map
2677 frame_->EmitPush(r0);
2678
2679 // Push the length of the array and the initial index onto the stack.
2680 __ ldr(r0, FieldMemOperand(r0, FixedArray::kLengthOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00002681 frame_->EmitPush(r0);
2682 __ mov(r0, Operand(Smi::FromInt(0))); // init index
2683 frame_->EmitPush(r0);
2684
2685 // Condition.
2686 entry.Bind();
2687 // sp[0] : index
2688 // sp[1] : array/enum cache length
2689 // sp[2] : array or enum cache
2690 // sp[3] : 0 or map
2691 // sp[4] : enumerable
2692 // Grab the current frame's height for the break and continue
2693 // targets only after all the state is pushed on the frame.
Kristian Monsen25f61362010-05-21 11:50:48 +01002694 node->break_target()->SetExpectedHeight();
2695 node->continue_target()->SetExpectedHeight();
Steve Blocka7e24c12009-10-30 11:49:00 +00002696
Kristian Monsen25f61362010-05-21 11:50:48 +01002697 // Load the current count to r0, load the length to r1.
Leon Clarkef7060e22010-06-03 12:02:55 +01002698 __ Ldrd(r0, r1, frame_->ElementAt(0));
Steve Block6ded16b2010-05-10 14:33:55 +01002699 __ cmp(r0, r1); // compare to the array length
Steve Blocka7e24c12009-10-30 11:49:00 +00002700 node->break_target()->Branch(hs);
2701
Steve Blocka7e24c12009-10-30 11:49:00 +00002702 // Get the i'th entry of the array.
2703 __ ldr(r2, frame_->ElementAt(2));
2704 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2705 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
2706
2707 // Get Map or 0.
2708 __ ldr(r2, frame_->ElementAt(3));
2709 // Check if this (still) matches the map of the enumerable.
2710 // If not, we have to filter the key.
2711 __ ldr(r1, frame_->ElementAt(4));
2712 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset));
2713 __ cmp(r1, Operand(r2));
2714 end_del_check.Branch(eq);
2715
2716 // Convert the entry to a string (or null if it isn't a property anymore).
 2717 __ ldr(r0, frame_->ElementAt(4)); // load enumerable
2718 frame_->EmitPush(r0);
2719 frame_->EmitPush(r3); // push entry
Steve Blockd0582a62009-12-15 09:54:21 +00002720 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, 2);
Iain Merrick75681382010-08-19 15:07:18 +01002721 __ mov(r3, Operand(r0), SetCC);
Steve Blocka7e24c12009-10-30 11:49:00 +00002722 // If the property has been removed while iterating, we just skip it.
Steve Blocka7e24c12009-10-30 11:49:00 +00002723 node->continue_target()->Branch(eq);
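  // E.g. in 'for (var k in o) delete o.b;' the key "b", once deleted,
  // no longer survives the FILTER_KEY call above, so the body is
  // skipped for it instead of seeing a stale property name.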
2724
2725 end_del_check.Bind();
2726 // Store the entry in the 'each' expression and take another spin in the
 2727 // loop. r3: i'th entry of the enum cache (or string thereof)
2728 frame_->EmitPush(r3); // push entry
Iain Merrick75681382010-08-19 15:07:18 +01002729 { VirtualFrame::RegisterAllocationScope scope(this);
2730 Reference each(this, node->each());
Steve Blocka7e24c12009-10-30 11:49:00 +00002731 if (!each.is_illegal()) {
2732 if (each.size() > 0) {
Iain Merrick75681382010-08-19 15:07:18 +01002733 // Loading a reference may leave the frame in an unspilled state.
2734 frame_->SpillAll(); // Sync stack to memory.
2735 // Get the value (under the reference on the stack) from memory.
Steve Blocka7e24c12009-10-30 11:49:00 +00002736 __ ldr(r0, frame_->ElementAt(each.size()));
2737 frame_->EmitPush(r0);
Steve Block8defd9f2010-07-08 12:39:36 +01002738 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Iain Merrick75681382010-08-19 15:07:18 +01002739 frame_->Drop(2); // The result of the set and the extra pushed value.
Leon Clarked91b9f72010-01-27 17:25:45 +00002740 } else {
 2741 // If the reference was to a slot, we rely on the convenient property
Iain Merrick75681382010-08-19 15:07:18 +01002742 // that it doesn't matter whether a value (e.g., the entry pushed
Leon Clarked91b9f72010-01-27 17:25:45 +00002743 // above) is right on top of or right underneath a zero-sized reference.
Steve Block8defd9f2010-07-08 12:39:36 +01002744 each.SetValue(NOT_CONST_INIT, UNLIKELY_SMI);
Iain Merrick75681382010-08-19 15:07:18 +01002745 frame_->Drop(1); // Drop the result of the set operation.
Steve Blocka7e24c12009-10-30 11:49:00 +00002746 }
2747 }
2748 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002749 // Body.
2750 CheckStack(); // TODO(1222600): ignore if body contains calls.
Iain Merrick75681382010-08-19 15:07:18 +01002751 { VirtualFrame::RegisterAllocationScope scope(this);
2752 Visit(node->body());
2753 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002754
2755 // Next. Reestablish a spilled frame in case we are coming here via
2756 // a continue in the body.
2757 node->continue_target()->Bind();
2758 frame_->SpillAll();
2759 frame_->EmitPop(r0);
2760 __ add(r0, r0, Operand(Smi::FromInt(1)));
2761 frame_->EmitPush(r0);
2762 entry.Jump();
2763
2764 // Cleanup. No need to spill because VirtualFrame::Drop is safe for
2765 // any frame.
2766 node->break_target()->Bind();
2767 frame_->Drop(5);
2768
2769 // Exit.
2770 exit.Bind();
2771 node->continue_target()->Unuse();
2772 node->break_target()->Unuse();
2773 ASSERT(frame_->height() == original_height);
2774}
2775
2776
Steve Block3ce2e202009-11-05 08:53:23 +00002777void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002778#ifdef DEBUG
2779 int original_height = frame_->height();
2780#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002781 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002782 Comment cmnt(masm_, "[ TryCatchStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002783 CodeForStatementPosition(node);
2784
2785 JumpTarget try_block;
2786 JumpTarget exit;
2787
2788 try_block.Call();
2789 // --- Catch block ---
2790 frame_->EmitPush(r0);
2791
2792 // Store the caught exception in the catch variable.
Leon Clarkee46be812010-01-19 14:06:41 +00002793 Variable* catch_var = node->catch_var()->var();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01002794 ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
2795 StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);
Steve Blocka7e24c12009-10-30 11:49:00 +00002796
2797 // Remove the exception from the stack.
2798 frame_->Drop();
2799
Iain Merrick75681382010-08-19 15:07:18 +01002800 { VirtualFrame::RegisterAllocationScope scope(this);
2801 VisitStatements(node->catch_block()->statements());
2802 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002803 if (frame_ != NULL) {
2804 exit.Jump();
2805 }
 2806
2808 // --- Try block ---
2809 try_block.Bind();
2810
2811 frame_->PushTryHandler(TRY_CATCH_HANDLER);
2812 int handler_height = frame_->height();
2813
2814 // Shadow the labels for all escapes from the try block, including
2815 // returns. During shadowing, the original label is hidden as the
2816 // LabelShadow and operations on the original actually affect the
2817 // shadowing label.
2818 //
2819 // We should probably try to unify the escaping labels and the return
2820 // label.
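  // Concretely, in something like
  //   try { if (done) return result; } catch (e) { ... }
  // the 'return' may not leave the try block directly: the try handler
  // must be unlinked first, which the shadow targets arrange below.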
2821 int nof_escapes = node->escaping_targets()->length();
2822 List<ShadowTarget*> shadows(1 + nof_escapes);
2823
2824 // Add the shadow target for the function return.
2825 static const int kReturnShadowIndex = 0;
2826 shadows.Add(new ShadowTarget(&function_return_));
2827 bool function_return_was_shadowed = function_return_is_shadowed_;
2828 function_return_is_shadowed_ = true;
2829 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2830
2831 // Add the remaining shadow targets.
2832 for (int i = 0; i < nof_escapes; i++) {
2833 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2834 }
2835
2836 // Generate code for the statements in the try block.
Iain Merrick75681382010-08-19 15:07:18 +01002837 { VirtualFrame::RegisterAllocationScope scope(this);
2838 VisitStatements(node->try_block()->statements());
2839 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002840
2841 // Stop the introduced shadowing and count the number of required unlinks.
2842 // After shadowing stops, the original labels are unshadowed and the
2843 // LabelShadows represent the formerly shadowing labels.
2844 bool has_unlinks = false;
2845 for (int i = 0; i < shadows.length(); i++) {
2846 shadows[i]->StopShadowing();
2847 has_unlinks = has_unlinks || shadows[i]->is_linked();
2848 }
2849 function_return_is_shadowed_ = function_return_was_shadowed;
2850
2851 // Get an external reference to the handler address.
2852 ExternalReference handler_address(Top::k_handler_address);
2853
2854 // If we can fall off the end of the try block, unlink from try chain.
2855 if (has_valid_frame()) {
2856 // The next handler address is on top of the frame. Unlink from
2857 // the handler list and drop the rest of this handler from the
2858 // frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002859 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Iain Merrick75681382010-08-19 15:07:18 +01002860 frame_->EmitPop(r1); // r0 can contain the return value.
Steve Blocka7e24c12009-10-30 11:49:00 +00002861 __ mov(r3, Operand(handler_address));
2862 __ str(r1, MemOperand(r3));
2863 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2864 if (has_unlinks) {
2865 exit.Jump();
2866 }
2867 }
2868
2869 // Generate unlink code for the (formerly) shadowing labels that have been
2870 // jumped to. Deallocate each shadow target.
2871 for (int i = 0; i < shadows.length(); i++) {
2872 if (shadows[i]->is_linked()) {
2873 // Unlink from try chain;
2874 shadows[i]->Bind();
2875 // Because we can be jumping here (to spilled code) from unspilled
2876 // code, we need to reestablish a spilled frame at this block.
2877 frame_->SpillAll();
2878
2879 // Reload sp from the top handler, because some statements that we
 2880 // break from (e.g., for...in) may have left stuff on the stack.
2881 __ mov(r3, Operand(handler_address));
2882 __ ldr(sp, MemOperand(r3));
2883 frame_->Forget(frame_->height() - handler_height);
2884
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002885 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Iain Merrick75681382010-08-19 15:07:18 +01002886 frame_->EmitPop(r1); // r0 can contain the return value.
Steve Blocka7e24c12009-10-30 11:49:00 +00002887 __ str(r1, MemOperand(r3));
2888 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2889
2890 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
2891 frame_->PrepareForReturn();
2892 }
2893 shadows[i]->other_target()->Jump();
2894 }
2895 }
2896
2897 exit.Bind();
2898 ASSERT(!has_valid_frame() || frame_->height() == original_height);
2899}
2900
2901
Steve Block3ce2e202009-11-05 08:53:23 +00002902void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00002903#ifdef DEBUG
2904 int original_height = frame_->height();
2905#endif
Steve Block6ded16b2010-05-10 14:33:55 +01002906 VirtualFrame::SpilledScope spilled_scope(frame_);
Steve Block3ce2e202009-11-05 08:53:23 +00002907 Comment cmnt(masm_, "[ TryFinallyStatement");
Steve Blocka7e24c12009-10-30 11:49:00 +00002908 CodeForStatementPosition(node);
2909
2910 // State: Used to keep track of reason for entering the finally
2911 // block. Should probably be extended to hold information for
2912 // break/continue from within the try block.
2913 enum { FALLING, THROWING, JUMPING };
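  // E.g. for 'try { if (c) break; f(); } finally { g(); }' the finally
  // code runs with state JUMPING (plus the shadow index) after the
  // break, FALLING when f() completes normally, and THROWING when an
  // exception unwinds to this handler.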
2914
2915 JumpTarget try_block;
2916 JumpTarget finally_block;
2917
2918 try_block.Call();
2919
2920 frame_->EmitPush(r0); // save exception object on the stack
2921 // In case of thrown exceptions, this is where we continue.
2922 __ mov(r2, Operand(Smi::FromInt(THROWING)));
2923 finally_block.Jump();
2924
2925 // --- Try block ---
2926 try_block.Bind();
2927
2928 frame_->PushTryHandler(TRY_FINALLY_HANDLER);
2929 int handler_height = frame_->height();
2930
2931 // Shadow the labels for all escapes from the try block, including
2932 // returns. Shadowing hides the original label as the LabelShadow and
2933 // operations on the original actually affect the shadowing label.
2934 //
2935 // We should probably try to unify the escaping labels and the return
2936 // label.
2937 int nof_escapes = node->escaping_targets()->length();
2938 List<ShadowTarget*> shadows(1 + nof_escapes);
2939
2940 // Add the shadow target for the function return.
2941 static const int kReturnShadowIndex = 0;
2942 shadows.Add(new ShadowTarget(&function_return_));
2943 bool function_return_was_shadowed = function_return_is_shadowed_;
2944 function_return_is_shadowed_ = true;
2945 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_);
2946
2947 // Add the remaining shadow targets.
2948 for (int i = 0; i < nof_escapes; i++) {
2949 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i)));
2950 }
2951
2952 // Generate code for the statements in the try block.
Iain Merrick75681382010-08-19 15:07:18 +01002953 { VirtualFrame::RegisterAllocationScope scope(this);
2954 VisitStatements(node->try_block()->statements());
2955 }
Steve Blocka7e24c12009-10-30 11:49:00 +00002956
2957 // Stop the introduced shadowing and count the number of required unlinks.
2958 // After shadowing stops, the original labels are unshadowed and the
2959 // LabelShadows represent the formerly shadowing labels.
2960 int nof_unlinks = 0;
2961 for (int i = 0; i < shadows.length(); i++) {
2962 shadows[i]->StopShadowing();
2963 if (shadows[i]->is_linked()) nof_unlinks++;
2964 }
2965 function_return_is_shadowed_ = function_return_was_shadowed;
2966
2967 // Get an external reference to the handler address.
2968 ExternalReference handler_address(Top::k_handler_address);
2969
2970 // If we can fall off the end of the try block, unlink from the try
2971 // chain and set the state on the frame to FALLING.
2972 if (has_valid_frame()) {
2973 // The next handler address is on top of the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01002974 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00002975 frame_->EmitPop(r1);
2976 __ mov(r3, Operand(handler_address));
2977 __ str(r1, MemOperand(r3));
2978 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
2979
2980 // Fake a top of stack value (unneeded when FALLING) and set the
2981 // state in r2, then jump around the unlink blocks if any.
2982 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2983 frame_->EmitPush(r0);
2984 __ mov(r2, Operand(Smi::FromInt(FALLING)));
2985 if (nof_unlinks > 0) {
2986 finally_block.Jump();
2987 }
2988 }
2989
2990 // Generate code to unlink and set the state for the (formerly)
2991 // shadowing targets that have been jumped to.
2992 for (int i = 0; i < shadows.length(); i++) {
2993 if (shadows[i]->is_linked()) {
2994 // If we have come from the shadowed return, the return value is
2995 // in (a non-refcounted reference to) r0. We must preserve it
2996 // until it is pushed.
2997 //
2998 // Because we can be jumping here (to spilled code) from
2999 // unspilled code, we need to reestablish a spilled frame at
3000 // this block.
3001 shadows[i]->Bind();
3002 frame_->SpillAll();
3003
3004 // Reload sp from the top handler, because some statements that
 3005 // we break from (e.g., for...in) may have left stuff on the
3006 // stack.
3007 __ mov(r3, Operand(handler_address));
3008 __ ldr(sp, MemOperand(r3));
3009 frame_->Forget(frame_->height() - handler_height);
3010
3011 // Unlink this handler and drop it from the frame. The next
3012 // handler address is currently on top of the frame.
Kristian Monsen50ef84f2010-07-29 15:18:00 +01003013 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003014 frame_->EmitPop(r1);
3015 __ str(r1, MemOperand(r3));
3016 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
3017
3018 if (i == kReturnShadowIndex) {
3019 // If this label shadowed the function return, materialize the
3020 // return value on the stack.
3021 frame_->EmitPush(r0);
3022 } else {
3023 // Fake TOS for targets that shadowed breaks and continues.
3024 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3025 frame_->EmitPush(r0);
3026 }
3027 __ mov(r2, Operand(Smi::FromInt(JUMPING + i)));
3028 if (--nof_unlinks > 0) {
3029 // If this is not the last unlink block, jump around the next.
3030 finally_block.Jump();
3031 }
3032 }
3033 }
3034
3035 // --- Finally block ---
3036 finally_block.Bind();
3037
3038 // Push the state on the stack.
3039 frame_->EmitPush(r2);
3040
3041 // We keep two elements on the stack - the (possibly faked) result
3042 // and the state - while evaluating the finally block.
3043 //
3044 // Generate code for the statements in the finally block.
Iain Merrick75681382010-08-19 15:07:18 +01003045 { VirtualFrame::RegisterAllocationScope scope(this);
3046 VisitStatements(node->finally_block()->statements());
3047 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003048
3049 if (has_valid_frame()) {
3050 // Restore state and return value or faked TOS.
3051 frame_->EmitPop(r2);
3052 frame_->EmitPop(r0);
3053 }
3054
3055 // Generate code to jump to the right destination for all used
3056 // formerly shadowing targets. Deallocate each shadow target.
3057 for (int i = 0; i < shadows.length(); i++) {
3058 if (has_valid_frame() && shadows[i]->is_bound()) {
3059 JumpTarget* original = shadows[i]->other_target();
3060 __ cmp(r2, Operand(Smi::FromInt(JUMPING + i)));
3061 if (!function_return_is_shadowed_ && i == kReturnShadowIndex) {
3062 JumpTarget skip;
3063 skip.Branch(ne);
3064 frame_->PrepareForReturn();
3065 original->Jump();
3066 skip.Bind();
3067 } else {
3068 original->Branch(eq);
3069 }
3070 }
3071 }
3072
3073 if (has_valid_frame()) {
3074 // Check if we need to rethrow the exception.
3075 JumpTarget exit;
3076 __ cmp(r2, Operand(Smi::FromInt(THROWING)));
3077 exit.Branch(ne);
3078
3079 // Rethrow exception.
3080 frame_->EmitPush(r0);
3081 frame_->CallRuntime(Runtime::kReThrow, 1);
3082
3083 // Done.
3084 exit.Bind();
3085 }
3086 ASSERT(!has_valid_frame() || frame_->height() == original_height);
3087}
3088
3089
3090void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
3091#ifdef DEBUG
3092 int original_height = frame_->height();
3093#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003094 Comment cmnt(masm_, "[ DebuggerStatement");
3095 CodeForStatementPosition(node);
3096#ifdef ENABLE_DEBUGGER_SUPPORT
Andrei Popescu402d9372010-02-26 13:31:12 +00003097 frame_->DebugBreak();
Steve Blocka7e24c12009-10-30 11:49:00 +00003098#endif
3099 // Ignore the return value.
3100 ASSERT(frame_->height() == original_height);
3101}
3102
3103
Steve Block6ded16b2010-05-10 14:33:55 +01003104void CodeGenerator::InstantiateFunction(
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003105 Handle<SharedFunctionInfo> function_info,
3106 bool pretenure) {
Leon Clarkee46be812010-01-19 14:06:41 +00003107 // Use the fast case closure allocation code that allocates in new
3108 // space for nested functions that don't need literals cloning.
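  // E.g. a simple nested function expression such as
  //   function (a, b) { return a + b; }
  // carries no literals, so when it is not pretenured it is allocated
  // by the FastNewClosureStub path below instead of a runtime call.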
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003109 if (scope()->is_function_scope() &&
3110 function_info->num_literals() == 0 &&
3111 !pretenure) {
Leon Clarkee46be812010-01-19 14:06:41 +00003112 FastNewClosureStub stub;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003113 frame_->EmitPush(Operand(function_info));
3114 frame_->SpillAll();
Leon Clarkee46be812010-01-19 14:06:41 +00003115 frame_->CallStub(&stub, 1);
3116 frame_->EmitPush(r0);
3117 } else {
3118 // Create a new closure.
3119 frame_->EmitPush(cp);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003120 frame_->EmitPush(Operand(function_info));
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003121 frame_->EmitPush(Operand(pretenure
3122 ? Factory::true_value()
3123 : Factory::false_value()));
3124 frame_->CallRuntime(Runtime::kNewClosure, 3);
Leon Clarkee46be812010-01-19 14:06:41 +00003125 frame_->EmitPush(r0);
3126 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003127}
3128
3129
3130void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
3131#ifdef DEBUG
3132 int original_height = frame_->height();
3133#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003134 Comment cmnt(masm_, "[ FunctionLiteral");
3135
Steve Block6ded16b2010-05-10 14:33:55 +01003136 // Build the function info and instantiate it.
3137 Handle<SharedFunctionInfo> function_info =
Ben Murdochf87a2032010-10-22 12:50:53 +01003138 Compiler::BuildFunctionInfo(node, script());
3139 if (function_info.is_null()) {
3140 SetStackOverflow();
Steve Blocka7e24c12009-10-30 11:49:00 +00003141 ASSERT(frame_->height() == original_height);
3142 return;
3143 }
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003144 InstantiateFunction(function_info, node->pretenure());
Steve Block6ded16b2010-05-10 14:33:55 +01003145 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003146}
3147
3148
Steve Block6ded16b2010-05-10 14:33:55 +01003149void CodeGenerator::VisitSharedFunctionInfoLiteral(
3150 SharedFunctionInfoLiteral* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003151#ifdef DEBUG
3152 int original_height = frame_->height();
3153#endif
Steve Block6ded16b2010-05-10 14:33:55 +01003154 Comment cmnt(masm_, "[ SharedFunctionInfoLiteral");
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08003155 InstantiateFunction(node->shared_function_info(), false);
Steve Block6ded16b2010-05-10 14:33:55 +01003156 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003157}
3158
3159
3160void CodeGenerator::VisitConditional(Conditional* node) {
3161#ifdef DEBUG
3162 int original_height = frame_->height();
3163#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003164 Comment cmnt(masm_, "[ Conditional");
3165 JumpTarget then;
3166 JumpTarget else_;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003167 LoadCondition(node->condition(), &then, &else_, true);
Steve Blocka7e24c12009-10-30 11:49:00 +00003168 if (has_valid_frame()) {
3169 Branch(false, &else_);
3170 }
3171 if (has_valid_frame() || then.is_linked()) {
3172 then.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003173 Load(node->then_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003174 }
3175 if (else_.is_linked()) {
3176 JumpTarget exit;
3177 if (has_valid_frame()) exit.Jump();
3178 else_.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003179 Load(node->else_expression());
Steve Blocka7e24c12009-10-30 11:49:00 +00003180 if (exit.is_linked()) exit.Bind();
3181 }
Steve Block6ded16b2010-05-10 14:33:55 +01003182 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003183}
3184
3185
3186void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
Steve Blocka7e24c12009-10-30 11:49:00 +00003187 if (slot->type() == Slot::LOOKUP) {
3188 ASSERT(slot->var()->is_dynamic());
3189
Steve Block6ded16b2010-05-10 14:33:55 +01003190 // JumpTargets do not yet support merging frames so the frame must be
3191 // spilled when jumping to these targets.
Steve Blocka7e24c12009-10-30 11:49:00 +00003192 JumpTarget slow;
3193 JumpTarget done;
3194
Kristian Monsen25f61362010-05-21 11:50:48 +01003195 // Generate fast case for loading from slots that correspond to
3196 // local/global variables or arguments unless they are shadowed by
3197 // eval-introduced bindings.
3198 EmitDynamicLoadFromSlotFastCase(slot,
3199 typeof_state,
3200 &slow,
3201 &done);
Steve Blocka7e24c12009-10-30 11:49:00 +00003202
3203 slow.Bind();
3204 frame_->EmitPush(cp);
Steve Block8defd9f2010-07-08 12:39:36 +01003205 frame_->EmitPush(Operand(slot->var()->name()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003206
3207 if (typeof_state == INSIDE_TYPEOF) {
3208 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3209 } else {
3210 frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
3211 }
3212
3213 done.Bind();
3214 frame_->EmitPush(r0);
3215
3216 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003217 Register scratch = VirtualFrame::scratch0();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003218 TypeInfo info = type_info(slot);
3219 frame_->EmitPush(SlotOperand(slot, scratch), info);
Steve Block8defd9f2010-07-08 12:39:36 +01003220
Steve Blocka7e24c12009-10-30 11:49:00 +00003221 if (slot->var()->mode() == Variable::CONST) {
3222 // Const slots may contain 'the hole' value (the constant hasn't been
3223 // initialized yet) which needs to be converted into the 'undefined'
3224 // value.
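  // E.g. in 'f(); const c = 1; function f() { return c; }' the call to
  // f() reads c before the const statement has run, so the slot still
  // holds the hole and the read must yield undefined.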
3225 Comment cmnt(masm_, "[ Unhole const");
Steve Block8defd9f2010-07-08 12:39:36 +01003226 Register tos = frame_->PopToRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003227 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003228 __ cmp(tos, ip);
3229 __ LoadRoot(tos, Heap::kUndefinedValueRootIndex, eq);
3230 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00003231 }
3232 }
3233}
3234
3235
Steve Block6ded16b2010-05-10 14:33:55 +01003236void CodeGenerator::LoadFromSlotCheckForArguments(Slot* slot,
3237 TypeofState state) {
Steve Block8defd9f2010-07-08 12:39:36 +01003238 VirtualFrame::RegisterAllocationScope scope(this);
Steve Block6ded16b2010-05-10 14:33:55 +01003239 LoadFromSlot(slot, state);
3240
3241 // Bail out quickly if we're not using lazy arguments allocation.
3242 if (ArgumentsMode() != LAZY_ARGUMENTS_ALLOCATION) return;
3243
3244 // ... or if the slot isn't a non-parameter arguments slot.
3245 if (slot->type() == Slot::PARAMETER || !slot->is_arguments()) return;
3246
Steve Block8defd9f2010-07-08 12:39:36 +01003247 // Peek at the loaded value on top of the stack, bringing it into a
Steve Block6ded16b2010-05-10 14:33:55 +01003248 // register while leaving it on the stack.
Steve Block8defd9f2010-07-08 12:39:36 +01003249 Register tos = frame_->Peek();
Steve Block6ded16b2010-05-10 14:33:55 +01003250
3251 // If the loaded value is the sentinel that indicates that we
3252 // haven't loaded the arguments object yet, we need to do it now.
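  // This lazy scheme keeps a function that touches 'arguments' only on
  // a rare path, e.g.
  //   function f(a) { if (!a) return arguments.length; return a; }
  // from materializing the arguments object until that path is hit.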
3253 JumpTarget exit;
Ben Murdoch086aeea2011-05-13 15:57:08 +01003254 __ LoadRoot(ip, Heap::kArgumentsMarkerRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003255 __ cmp(tos, ip);
Steve Block6ded16b2010-05-10 14:33:55 +01003256 exit.Branch(ne);
3257 frame_->Drop();
3258 StoreArgumentsObject(false);
3259 exit.Bind();
3260}
3261
3262
Leon Clarkee46be812010-01-19 14:06:41 +00003263void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
3264 ASSERT(slot != NULL);
Steve Block8defd9f2010-07-08 12:39:36 +01003265 VirtualFrame::RegisterAllocationScope scope(this);
Leon Clarkee46be812010-01-19 14:06:41 +00003266 if (slot->type() == Slot::LOOKUP) {
3267 ASSERT(slot->var()->is_dynamic());
3268
3269 // For now, just do a runtime call.
3270 frame_->EmitPush(cp);
Steve Block8defd9f2010-07-08 12:39:36 +01003271 frame_->EmitPush(Operand(slot->var()->name()));
Leon Clarkee46be812010-01-19 14:06:41 +00003272
3273 if (init_state == CONST_INIT) {
3274 // Same as the case for a normal store, but ignores attribute
3275 // (e.g. READ_ONLY) of context slot so that we can initialize
3276 // const properties (introduced via eval("const foo = (some
3277 // expr);")). Also, uses the current function context instead of
3278 // the top context.
3279 //
3280 // Note that we must declare the foo upon entry of eval(), via a
3281 // context slot declaration, but we cannot initialize it at the
3282 // same time, because the const declaration may be at the end of
3283 // the eval code (sigh...) and the const variable may have been
3284 // used before (where its value is 'undefined'). Thus, we can only
3285 // do the initialization when we actually encounter the expression
3286 // and when the expression operands are defined and valid, and
3287 // thus we need the split into 2 operations: declaration of the
3288 // context slot followed by initialization.
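  // E.g. eval("if (cond) use(foo); const foo = 1;") must see foo
  // declared from the start (reads yield undefined) while the store of
  // the value 1 only happens when the const statement itself executes.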
3289 frame_->CallRuntime(Runtime::kInitializeConstContextSlot, 3);
3290 } else {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003291 frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
3292 frame_->CallRuntime(Runtime::kStoreContextSlot, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00003293 }
3294 // Storing a variable must keep the (new) value on the expression
3295 // stack. This is necessary for compiling assignment expressions.
3296 frame_->EmitPush(r0);
3297
3298 } else {
3299 ASSERT(!slot->var()->is_dynamic());
Steve Block6ded16b2010-05-10 14:33:55 +01003300 Register scratch = VirtualFrame::scratch0();
Steve Block8defd9f2010-07-08 12:39:36 +01003301 Register scratch2 = VirtualFrame::scratch1();
Leon Clarkee46be812010-01-19 14:06:41 +00003302
Steve Block6ded16b2010-05-10 14:33:55 +01003303 // The frame must be spilled when branching to this target.
Leon Clarkee46be812010-01-19 14:06:41 +00003304 JumpTarget exit;
Steve Block6ded16b2010-05-10 14:33:55 +01003305
Leon Clarkee46be812010-01-19 14:06:41 +00003306 if (init_state == CONST_INIT) {
3307 ASSERT(slot->var()->mode() == Variable::CONST);
3308 // Only the first const initialization must be executed (the slot
3309 // still contains 'the hole' value). When the assignment is
3310 // executed, the code is identical to a normal store (see below).
3311 Comment cmnt(masm_, "[ Init const");
Steve Block6ded16b2010-05-10 14:33:55 +01003312 __ ldr(scratch, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003313 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
Steve Block6ded16b2010-05-10 14:33:55 +01003314 __ cmp(scratch, ip);
Leon Clarkee46be812010-01-19 14:06:41 +00003315 exit.Branch(ne);
3316 }
3317
3318 // We must execute the store. Storing a variable must keep the
3319 // (new) value on the stack. This is necessary for compiling
3320 // assignment expressions.
3321 //
3322 // Note: We will reach here even with slot->var()->mode() ==
3323 // Variable::CONST because of const declarations which will
3324 // initialize consts to 'the hole' value and by doing so, end up
3325 // calling this code. r2 may be loaded with context; used below in
3326 // RecordWrite.
Steve Block6ded16b2010-05-10 14:33:55 +01003327 Register tos = frame_->Peek();
3328 __ str(tos, SlotOperand(slot, scratch));
Leon Clarkee46be812010-01-19 14:06:41 +00003329 if (slot->type() == Slot::CONTEXT) {
3330 // Skip write barrier if the written value is a smi.
Steve Block6ded16b2010-05-10 14:33:55 +01003331 __ tst(tos, Operand(kSmiTagMask));
3332 // We don't use tos any more after here.
Leon Clarkee46be812010-01-19 14:06:41 +00003333 exit.Branch(eq);
Steve Block6ded16b2010-05-10 14:33:55 +01003334 // scratch is loaded with context when calling SlotOperand above.
Leon Clarkee46be812010-01-19 14:06:41 +00003335 int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01003336 // We need an extra register. Until we have a way to do that in the
3337 // virtual frame we will cheat and ask for a free TOS register.
3338 Register scratch3 = frame_->GetTOSRegister();
3339 __ RecordWrite(scratch, Operand(offset), scratch2, scratch3);
Leon Clarkee46be812010-01-19 14:06:41 +00003340 }
3341 // If we definitely did not jump over the assignment, we do not need
3342 // to bind the exit label. Doing so can defeat peephole
3343 // optimization.
3344 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
3345 exit.Bind();
3346 }
3347 }
3348}
3349
3350
Steve Blocka7e24c12009-10-30 11:49:00 +00003351void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot,
3352 TypeofState typeof_state,
Steve Blocka7e24c12009-10-30 11:49:00 +00003353 JumpTarget* slow) {
3354 // Check that no extension objects have been created by calls to
3355 // eval from the current scope to the global scope.
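  // E.g. an enclosing eval("var x = 0") would have created a context
  // extension object shadowing a global 'x'; if any extension object
  // is found on the way out, we have to take the slow path.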
Steve Block6ded16b2010-05-10 14:33:55 +01003356 Register tmp = frame_->scratch0();
3357 Register tmp2 = frame_->scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00003358 Register context = cp;
3359 Scope* s = scope();
3360 while (s != NULL) {
3361 if (s->num_heap_slots() > 0) {
3362 if (s->calls_eval()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003363 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003364 // Check that extension is NULL.
3365 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX));
3366 __ tst(tmp2, tmp2);
3367 slow->Branch(ne);
3368 }
3369 // Load next context in chain.
3370 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX));
3371 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3372 context = tmp;
3373 }
3374 // If no outer scope calls eval, we do not need to check more
3375 // context extensions.
3376 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
3377 s = s->outer_scope();
3378 }
3379
3380 if (s->is_eval_scope()) {
Steve Block6ded16b2010-05-10 14:33:55 +01003381 frame_->SpillAll();
Steve Blocka7e24c12009-10-30 11:49:00 +00003382 Label next, fast;
Steve Block6ded16b2010-05-10 14:33:55 +01003383 __ Move(tmp, context);
Steve Blocka7e24c12009-10-30 11:49:00 +00003384 __ bind(&next);
3385 // Terminate at global context.
3386 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset));
3387 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
3388 __ cmp(tmp2, ip);
3389 __ b(eq, &fast);
3390 // Check that extension is NULL.
3391 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX));
3392 __ tst(tmp2, tmp2);
3393 slow->Branch(ne);
3394 // Load next context in chain.
3395 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX));
3396 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset));
3397 __ b(&next);
3398 __ bind(&fast);
3399 }
3400
Steve Blocka7e24c12009-10-30 11:49:00 +00003401 // Load the global object.
3402 LoadGlobal();
Steve Block6ded16b2010-05-10 14:33:55 +01003403 // Setup the name register and call load IC.
3404 frame_->CallLoadIC(slot->var()->name(),
3405 typeof_state == INSIDE_TYPEOF
3406 ? RelocInfo::CODE_TARGET
3407 : RelocInfo::CODE_TARGET_CONTEXT);
Steve Blocka7e24c12009-10-30 11:49:00 +00003408}
3409
3410
Kristian Monsen25f61362010-05-21 11:50:48 +01003411void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
3412 TypeofState typeof_state,
3413 JumpTarget* slow,
3414 JumpTarget* done) {
3415 // Generate fast-case code for variables that might be shadowed by
3416 // eval-introduced variables. Eval is used a lot without
3417 // introducing variables. In those cases, we do not want to
3418 // perform a runtime call for all variables in the scope
3419 // containing the eval.
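  // E.g. in 'function f() { eval("1"); return x; }' the global 'x' is
  // only potentially shadowed; unless the eval actually introduced an
  // 'x', the checks below allow an ordinary global load.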
3420 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
3421 LoadFromGlobalSlotCheckExtensions(slot, typeof_state, slow);
3422 frame_->SpillAll();
3423 done->Jump();
3424
3425 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
3426 frame_->SpillAll();
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003427 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Kristian Monsen25f61362010-05-21 11:50:48 +01003428 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
3429 if (potential_slot != NULL) {
3430 // Generate fast case for locals that rewrite to slots.
3431 __ ldr(r0,
3432 ContextSlotOperandCheckExtensions(potential_slot,
3433 r1,
3434 r2,
3435 slow));
3436 if (potential_slot->var()->mode() == Variable::CONST) {
3437 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
3438 __ cmp(r0, ip);
3439 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
3440 }
3441 done->Jump();
3442 } else if (rewrite != NULL) {
3443 // Generate fast case for argument loads.
3444 Property* property = rewrite->AsProperty();
3445 if (property != NULL) {
3446 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
3447 Literal* key_literal = property->key()->AsLiteral();
3448 if (obj_proxy != NULL &&
3449 key_literal != NULL &&
3450 obj_proxy->IsArguments() &&
3451 key_literal->handle()->IsSmi()) {
3452 // Load arguments object if there are no eval-introduced
3453 // variables. Then load the argument from the arguments
3454 // object using keyed load.
3455 __ ldr(r0,
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003456 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
Kristian Monsen25f61362010-05-21 11:50:48 +01003457 r1,
3458 r2,
3459 slow));
3460 frame_->EmitPush(r0);
3461 __ mov(r1, Operand(key_literal->handle()));
3462 frame_->EmitPush(r1);
3463 EmitKeyedLoad();
3464 done->Jump();
3465 }
3466 }
3467 }
3468 }
3469}
3470
3471
Steve Blocka7e24c12009-10-30 11:49:00 +00003472void CodeGenerator::VisitSlot(Slot* node) {
3473#ifdef DEBUG
3474 int original_height = frame_->height();
3475#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003476 Comment cmnt(masm_, "[ Slot");
Steve Block6ded16b2010-05-10 14:33:55 +01003477 LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
3478 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003479}
3480
3481
3482void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
3483#ifdef DEBUG
3484 int original_height = frame_->height();
3485#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003486 Comment cmnt(masm_, "[ VariableProxy");
3487
3488 Variable* var = node->var();
3489 Expression* expr = var->rewrite();
3490 if (expr != NULL) {
3491 Visit(expr);
3492 } else {
3493 ASSERT(var->is_global());
3494 Reference ref(this, node);
Steve Block6ded16b2010-05-10 14:33:55 +01003495 ref.GetValue();
Steve Blocka7e24c12009-10-30 11:49:00 +00003496 }
Steve Block6ded16b2010-05-10 14:33:55 +01003497 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003498}
3499
3500
3501void CodeGenerator::VisitLiteral(Literal* node) {
3502#ifdef DEBUG
3503 int original_height = frame_->height();
3504#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003505 Comment cmnt(masm_, "[ Literal");
Steve Block6ded16b2010-05-10 14:33:55 +01003506 Register reg = frame_->GetTOSRegister();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003507 bool is_smi = node->handle()->IsSmi();
Steve Block6ded16b2010-05-10 14:33:55 +01003508 __ mov(reg, Operand(node->handle()));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003509 frame_->EmitPush(reg, is_smi ? TypeInfo::Smi() : TypeInfo::Unknown());
Steve Block6ded16b2010-05-10 14:33:55 +01003510 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003511}
3512
3513
3514void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
3515#ifdef DEBUG
3516 int original_height = frame_->height();
3517#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003518 Comment cmnt(masm_, "[ RegExp Literal");
3519
Steve Block8defd9f2010-07-08 12:39:36 +01003520 Register tmp = VirtualFrame::scratch0();
3521 // Free up a TOS register that can be used to push the literal.
3522 Register literal = frame_->GetTOSRegister();
3523
Steve Blocka7e24c12009-10-30 11:49:00 +00003524 // Retrieve the literal array and check the allocated entry.
3525
3526 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003527 __ ldr(tmp, frame_->Function());
Steve Blocka7e24c12009-10-30 11:49:00 +00003528
3529 // Load the literals array of the function.
Steve Block8defd9f2010-07-08 12:39:36 +01003530 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kLiteralsOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003531
 3532 // Load the literal at the index saved in the AST node.
3533 int literal_offset =
3534 FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
Steve Block8defd9f2010-07-08 12:39:36 +01003535 __ ldr(literal, FieldMemOperand(tmp, literal_offset));
Steve Blocka7e24c12009-10-30 11:49:00 +00003536
Ben Murdochbb769b22010-08-11 14:56:33 +01003537 JumpTarget materialized;
Steve Blocka7e24c12009-10-30 11:49:00 +00003538 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
Steve Block8defd9f2010-07-08 12:39:36 +01003539 __ cmp(literal, ip);
 3540 // This branch locks the virtual frame at the materialized label to
 3541 // match the one we have here, where the literal register is not on
 3542 // the stack and nothing is spilled.
Ben Murdochbb769b22010-08-11 14:56:33 +01003543 materialized.Branch(ne);
Steve Blocka7e24c12009-10-30 11:49:00 +00003544
Steve Block8defd9f2010-07-08 12:39:36 +01003545 // If the entry is undefined we call the runtime system to compute
Steve Blocka7e24c12009-10-30 11:49:00 +00003546 // the literal.
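  // E.g. the first execution of a function containing /ab+c/gi finds
  // undefined in its literal slot and materializes the boilerplate via
  // the runtime call; later executions merely clone it below.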
Steve Block8defd9f2010-07-08 12:39:36 +01003547 // literal array (0)
3548 frame_->EmitPush(tmp);
3549 // literal index (1)
3550 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3551 // RegExp pattern (2)
3552 frame_->EmitPush(Operand(node->pattern()));
3553 // RegExp flags (3)
3554 frame_->EmitPush(Operand(node->flags()));
Steve Blocka7e24c12009-10-30 11:49:00 +00003555 frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
Steve Block8defd9f2010-07-08 12:39:36 +01003556 __ Move(literal, r0);
Steve Blocka7e24c12009-10-30 11:49:00 +00003557
Ben Murdochbb769b22010-08-11 14:56:33 +01003558 materialized.Bind();
3559
Steve Block8defd9f2010-07-08 12:39:36 +01003560 frame_->EmitPush(literal);
Ben Murdochbb769b22010-08-11 14:56:33 +01003561 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3562 frame_->EmitPush(Operand(Smi::FromInt(size)));
3563 frame_->CallRuntime(Runtime::kAllocateInNewSpace, 1);
3564 // TODO(lrn): Use AllocateInNewSpace macro with fallback to runtime.
3565 // r0 is newly allocated space.
3566
3567 // Reuse literal variable with (possibly) a new register, still holding
3568 // the materialized boilerplate.
3569 literal = frame_->PopToRegister(r0);
3570
3571 __ CopyFields(r0, literal, tmp.bit(), size / kPointerSize);
3572
3573 // Push the clone.
3574 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003575 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003576}
3577
3578
Steve Blocka7e24c12009-10-30 11:49:00 +00003579void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
3580#ifdef DEBUG
3581 int original_height = frame_->height();
3582#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003583 Comment cmnt(masm_, "[ ObjectLiteral");
3584
Steve Block8defd9f2010-07-08 12:39:36 +01003585 Register literal = frame_->GetTOSRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003586 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003587 __ ldr(literal, frame_->Function());
Leon Clarkee46be812010-01-19 14:06:41 +00003588 // Literal array.
Steve Block8defd9f2010-07-08 12:39:36 +01003589 __ ldr(literal, FieldMemOperand(literal, JSFunction::kLiteralsOffset));
3590 frame_->EmitPush(literal);
Leon Clarkee46be812010-01-19 14:06:41 +00003591 // Literal index.
Steve Block8defd9f2010-07-08 12:39:36 +01003592 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
Leon Clarkee46be812010-01-19 14:06:41 +00003593 // Constant properties.
Steve Block8defd9f2010-07-08 12:39:36 +01003594 frame_->EmitPush(Operand(node->constant_properties()));
Steve Block6ded16b2010-05-10 14:33:55 +01003595 // Should the object literal have fast elements?
Steve Block8defd9f2010-07-08 12:39:36 +01003596 frame_->EmitPush(Operand(Smi::FromInt(node->fast_elements() ? 1 : 0)));
Leon Clarkee46be812010-01-19 14:06:41 +00003597 if (node->depth() > 1) {
Steve Block6ded16b2010-05-10 14:33:55 +01003598 frame_->CallRuntime(Runtime::kCreateObjectLiteral, 4);
Leon Clarkee46be812010-01-19 14:06:41 +00003599 } else {
Steve Block6ded16b2010-05-10 14:33:55 +01003600 frame_->CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003601 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003602 frame_->EmitPush(r0); // save the result
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003603
3604 // Mark all computed expressions that are bound to a key that
3605 // is shadowed by a later occurrence of the same key. For the
3606 // marked expressions, no store code is emitted.
3607 node->CalculateEmitStore();
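  // E.g. in '{x: f(), x: g()}' only the value of g() is stored; f() is
  // still evaluated for its side effects, but its result is dropped
  // (see the emit_store() checks below).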
3608
Steve Blocka7e24c12009-10-30 11:49:00 +00003609 for (int i = 0; i < node->properties()->length(); i++) {
Andrei Popescu402d9372010-02-26 13:31:12 +00003610 // At the start of each iteration, the top of stack contains
3611 // the newly created object literal.
Steve Blocka7e24c12009-10-30 11:49:00 +00003612 ObjectLiteral::Property* property = node->properties()->at(i);
3613 Literal* key = property->key();
3614 Expression* value = property->value();
3615 switch (property->kind()) {
3616 case ObjectLiteral::Property::CONSTANT:
3617 break;
3618 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
3619 if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
3620 // else fall through
Andrei Popescu402d9372010-02-26 13:31:12 +00003621 case ObjectLiteral::Property::COMPUTED:
3622 if (key->handle()->IsSymbol()) {
3623 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003624 Load(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003625 if (property->emit_store()) {
3626 frame_->PopToR0();
3627 // Fetch the object literal.
3628 frame_->SpillAllButCopyTOSToR1();
3629 __ mov(r2, Operand(key->handle()));
3630 frame_->CallCodeObject(ic, RelocInfo::CODE_TARGET, 0);
3631 } else {
3632 frame_->Drop();
3633 }
Andrei Popescu402d9372010-02-26 13:31:12 +00003634 break;
3635 }
3636 // else fall through
Steve Blocka7e24c12009-10-30 11:49:00 +00003637 case ObjectLiteral::Property::PROTOTYPE: {
Steve Block8defd9f2010-07-08 12:39:36 +01003638 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003639 Load(key);
3640 Load(value);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003641 if (property->emit_store()) {
Ben Murdoche0cee9b2011-05-25 10:26:03 +01003642 frame_->EmitPush(Operand(Smi::FromInt(NONE))); // PropertyAttributes
3643 frame_->CallRuntime(Runtime::kSetProperty, 4);
Teng-Hui Zhu3e5fa292010-11-09 16:16:48 -08003644 } else {
3645 frame_->Drop(3);
3646 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003647 break;
3648 }
3649 case ObjectLiteral::Property::SETTER: {
Steve Block8defd9f2010-07-08 12:39:36 +01003650 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003651 Load(key);
Steve Block8defd9f2010-07-08 12:39:36 +01003652 frame_->EmitPush(Operand(Smi::FromInt(1)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003653 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003654 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003655 break;
3656 }
3657 case ObjectLiteral::Property::GETTER: {
Steve Block8defd9f2010-07-08 12:39:36 +01003658 frame_->Dup();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003659 Load(key);
Steve Block8defd9f2010-07-08 12:39:36 +01003660 frame_->EmitPush(Operand(Smi::FromInt(0)));
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003661 Load(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00003662 frame_->CallRuntime(Runtime::kDefineAccessor, 4);
Steve Blocka7e24c12009-10-30 11:49:00 +00003663 break;
3664 }
3665 }
3666 }
Steve Block6ded16b2010-05-10 14:33:55 +01003667 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003668}
3669
3670
Steve Blocka7e24c12009-10-30 11:49:00 +00003671void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
3672#ifdef DEBUG
3673 int original_height = frame_->height();
3674#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003675 Comment cmnt(masm_, "[ ArrayLiteral");
3676
Steve Block8defd9f2010-07-08 12:39:36 +01003677 Register tos = frame_->GetTOSRegister();
Steve Blocka7e24c12009-10-30 11:49:00 +00003678 // Load the function of this activation.
Steve Block8defd9f2010-07-08 12:39:36 +01003679 __ ldr(tos, frame_->Function());
Andrei Popescu402d9372010-02-26 13:31:12 +00003680 // Load the literals array of the function.
Steve Block8defd9f2010-07-08 12:39:36 +01003681 __ ldr(tos, FieldMemOperand(tos, JSFunction::kLiteralsOffset));
3682 frame_->EmitPush(tos);
3683 frame_->EmitPush(Operand(Smi::FromInt(node->literal_index())));
3684 frame_->EmitPush(Operand(node->constant_elements()));
Andrei Popescu402d9372010-02-26 13:31:12 +00003685 int length = node->values()->length();
Iain Merrick75681382010-08-19 15:07:18 +01003686 if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
3687 FastCloneShallowArrayStub stub(
3688 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
3689 frame_->CallStub(&stub, 3);
3690 __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
3691 } else if (node->depth() > 1) {
Leon Clarkee46be812010-01-19 14:06:41 +00003692 frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
Iain Merrick75681382010-08-19 15:07:18 +01003693 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
Leon Clarkee46be812010-01-19 14:06:41 +00003694 frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
Andrei Popescu402d9372010-02-26 13:31:12 +00003695 } else {
Iain Merrick75681382010-08-19 15:07:18 +01003696 FastCloneShallowArrayStub stub(
3697 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
Andrei Popescu402d9372010-02-26 13:31:12 +00003698 frame_->CallStub(&stub, 3);
Steve Blocka7e24c12009-10-30 11:49:00 +00003699 }
Steve Blocka7e24c12009-10-30 11:49:00 +00003700 frame_->EmitPush(r0); // save the result
Leon Clarkee46be812010-01-19 14:06:41 +00003701 // r0: created object literal
Steve Blocka7e24c12009-10-30 11:49:00 +00003702
3703 // Generate code to set the elements in the array that are not
3704 // literals.
3705 for (int i = 0; i < node->values()->length(); i++) {
3706 Expression* value = node->values()->at(i);
3707
3708 // If value is a literal the property value is already set in the
3709 // boilerplate object.
3710 if (value->AsLiteral() != NULL) continue;
3711 // If value is a materialized literal the property value is already set
3712 // in the boilerplate object if it is simple.
3713 if (CompileTimeValue::IsCompileTimeValue(value)) continue;
3714
3715 // The property must be set by generated code.
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003716 Load(value);
Steve Block8defd9f2010-07-08 12:39:36 +01003717 frame_->PopToR0();
Steve Blocka7e24c12009-10-30 11:49:00 +00003718 // Fetch the object literal.
Steve Block8defd9f2010-07-08 12:39:36 +01003719 frame_->SpillAllButCopyTOSToR1();
3720
Steve Blocka7e24c12009-10-30 11:49:00 +00003721 // Get the elements array.
3722 __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
3723
3724 // Write to the indexed properties array.
3725 int offset = i * kPointerSize + FixedArray::kHeaderSize;
3726 __ str(r0, FieldMemOperand(r1, offset));
3727
3728 // Update the write barrier for the array address.
Kristian Monsen9dcf7e22010-06-28 14:14:28 +01003729 __ RecordWrite(r1, Operand(offset), r3, r2);
Steve Blocka7e24c12009-10-30 11:49:00 +00003730 }
Steve Block6ded16b2010-05-10 14:33:55 +01003731 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00003732}
3733
3734
3735void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) {
3736#ifdef DEBUG
3737 int original_height = frame_->height();
3738#endif
Steve Blocka7e24c12009-10-30 11:49:00 +00003739 // Call runtime routine to allocate the catch extension object and
3740 // assign the exception value to the catch variable.
3741 Comment cmnt(masm_, "[ CatchExtensionObject");
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003742 Load(node->key());
3743 Load(node->value());
Steve Blocka7e24c12009-10-30 11:49:00 +00003744 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
3745 frame_->EmitPush(r0);
Steve Block6ded16b2010-05-10 14:33:55 +01003746 ASSERT_EQ(original_height + 1, frame_->height());
3747}
3748
3749
3750void CodeGenerator::EmitSlotAssignment(Assignment* node) {
3751#ifdef DEBUG
3752 int original_height = frame_->height();
3753#endif
3754 Comment cmnt(masm(), "[ Variable Assignment");
3755 Variable* var = node->target()->AsVariableProxy()->AsVariable();
3756 ASSERT(var != NULL);
Kristian Monsen0d5e1162010-09-30 15:31:59 +01003757 Slot* slot = var->AsSlot();
Steve Block6ded16b2010-05-10 14:33:55 +01003758 ASSERT(slot != NULL);
3759
3760 // Evaluate the right-hand side.
3761 if (node->is_compound()) {
3762 // For a compound assignment the right-hand side is a binary operation
3763 // between the current property value and the actual right-hand side.
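  // E.g. 'x += 5' loads x and combines it with the Smi literal 5 via
  // the specialized SmiOperation path below, while non-Smi right-hand
  // sides go through GenericBinaryOperation.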
3764 LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
3765
3766 // Perform the binary operation.
3767 Literal* literal = node->value()->AsLiteral();
Kristian Monsen80d68ea2010-09-08 11:05:35 +01003768 bool overwrite_value = node->value()->ResultOverwriteAllowed();
Steve Block6ded16b2010-05-10 14:33:55 +01003769 if (literal != NULL && literal->handle()->IsSmi()) {
3770 SmiOperation(node->binary_op(),
3771 literal->handle(),
3772 false,
3773 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
3774 } else {
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003775 GenerateInlineSmi inline_smi =
3776 loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
3777 if (literal != NULL) {
3778 ASSERT(!literal->handle()->IsSmi());
3779 inline_smi = DONT_GENERATE_INLINE_SMI;
3780 }
Steve Block6ded16b2010-05-10 14:33:55 +01003781 Load(node->value());
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01003782 GenericBinaryOperation(node->binary_op(),
3783 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
3784 inline_smi);
Steve Block6ded16b2010-05-10 14:33:55 +01003785 }
3786 } else {
3787 Load(node->value());
3788 }
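  // Example of the two paths above: in 'x += 1' the right-hand side is a smi
  // literal, so SmiOperation emits the constant-operand fast path; in
  // 'x += y' the value is loaded and handled by GenericBinaryOperation.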

  // Perform the assignment.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm(), "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  ASSERT(var == NULL || (prop == NULL && var->is_global()));

  // Initialize name and evaluate the receiver sub-expression if necessary. If
  // the receiver is trivial it is not placed on the stack at this point, but
  // loaded whenever actually needed.
  Handle<String> name;
  bool is_trivial_receiver = false;
  if (var != NULL) {
    name = var->name();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT_NOT_NULL(lit);
    name = Handle<String>::cast(lit->handle());
    // Do not materialize the receiver on the frame if it is trivial.
    is_trivial_receiver = prop->obj()->IsTrivial();
    if (!is_trivial_receiver) Load(prop->obj());
  }

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    // An initialization block consists of assignments of the form
    // expr.x = ..., so this will never be an assignment to a variable and
    // there must be a receiver object.
    ASSERT_EQ(NULL, var);
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      frame_->Dup();
    }
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }
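  // An initialization block is a run of consecutive assignments to the same
  // object, e.g. 'obj.a = 1; obj.b = 2; obj.c = 3;'. Going to slow
  // (dictionary) properties first and back to fast properties afterwards
  // sidesteps the quadratic cost of growing a fast-properties object one
  // field at a time.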

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block() && !is_trivial_receiver) {
    frame_->Dup();
  }

  // Stack layout:
  // [tos]   : receiver (only materialized if non-trivial)
  // [tos+1] : receiver if at the end of an initialization block

  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else if (var != NULL) {
      LoadGlobal();
    } else {
      frame_->Dup();
    }
    EmitNamedLoad(name, var != NULL);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
  }

  // Stack layout:
  // [tos]   : value
  // [tos+1] : receiver (only materialized if non-trivial)
  // [tos+2] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT_NE(Token::INIT_CONST, node->op());
  if (is_trivial_receiver) {
    // Load the receiver and swap with the value.
    Load(prop->obj());
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t0);
    frame_->EmitPush(t1);
  }
  CodeForSourcePosition(node->position());
  bool is_contextual = (var != NULL);
  EmitNamedStore(name, is_contextual);
  frame_->EmitPush(r0);

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    ASSERT_EQ(NULL, var);
    // The argument to the runtime call is the receiver.
    if (is_trivial_receiver) {
      Load(prop->obj());
    } else {
      // A copy of the receiver is below the value of the assignment. Swap
      // the receiver and the value of the assignment expression.
      Register t0 = frame_->PopToRegister();
      Register t1 = frame_->PopToRegister(t0);
      frame_->EmitPush(t0);
      frame_->EmitPush(t1);
    }
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Keyed Property Assignment");
  Property* prop = node->target()->AsProperty();
  ASSERT_NOT_NULL(prop);

  // Evaluate the receiver subexpression.
  Load(prop->obj());

  WriteBarrierCharacter wb_info;

  // Change to slow case in the beginning of an initialization block to
  // avoid the quadratic behavior of repeatedly adding fast properties.
  if (node->starts_initialization_block()) {
    frame_->Dup();
    frame_->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  // Change to fast case at the end of an initialization block. To prepare for
  // that add an extra copy of the receiver to the frame, so that it can be
  // converted back to fast case after the assignment.
  if (node->ends_initialization_block()) {
    frame_->Dup();
  }

  // Evaluate the key subexpression.
  Load(prop->key());

  // Stack layout:
  // [tos]   : key
  // [tos+1] : receiver
  // [tos+2] : receiver if at the end of an initialization block
  //
  // Evaluate the right-hand side.
  if (node->is_compound()) {
    // For a compound assignment the right-hand side is a binary operation
    // between the current property value and the actual right-hand side.
    // Duplicate receiver and key for loading the current property value.
    frame_->Dup2();
    EmitKeyedLoad();
    frame_->EmitPush(r0);

    // Perform the binary operation.
    Literal* literal = node->value()->AsLiteral();
    bool overwrite_value = node->value()->ResultOverwriteAllowed();
    if (literal != NULL && literal->handle()->IsSmi()) {
      SmiOperation(node->binary_op(),
                   literal->handle(),
                   false,
                   overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (literal != NULL) {
        ASSERT(!literal->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      Load(node->value());
      GenericBinaryOperation(node->binary_op(),
                             overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE,
                             inline_smi);
    }
    wb_info = node->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI;
  } else {
    // For non-compound assignment just load the right-hand side.
    Load(node->value());
    wb_info = node->value()->AsLiteral() != NULL ?
        NEVER_NEWSPACE :
        (node->value()->type()->IsLikelySmi() ? LIKELY_SMI : UNLIKELY_SMI);
  }
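  // About the hint just computed: a literal right-hand side is marked
  // NEVER_NEWSPACE (the codegen assumes its value is never a freshly
  // allocated new-space object), while other values are classified as
  // LIKELY_SMI or UNLIKELY_SMI from static type feedback; EmitKeyedStore
  // below uses this to decide how much write-barrier code to emit.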

  // Stack layout:
  // [tos]   : value
  // [tos+1] : key
  // [tos+2] : receiver
  // [tos+3] : receiver if at the end of an initialization block

  // Perform the assignment. It is safe to ignore constants here.
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  EmitKeyedStore(prop->key()->type(), wb_info);
  frame_->EmitPush(r0);

  // Stack layout:
  // [tos]   : result
  // [tos+1] : receiver if at the end of an initialization block

  // Change to fast case at the end of an initialization block.
  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment. Swap the receiver and
    // the value of the assignment expression.
    Register t0 = frame_->PopToRegister();
    Register t1 = frame_->PopToRegister(t0);
    frame_->EmitPush(t1);
    frame_->EmitPush(t0);
    frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  // Stack layout:
  // [tos] : result

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitAssignment(Assignment* node) {
  VirtualFrame::RegisterAllocationScope scope(this);
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Assignment");

  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references. We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitThrow(Throw* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Throw");

  Load(node->exception());
  CodeForSourcePosition(node->position());
  frame_->CallRuntime(Runtime::kThrow, 1);
  frame_->EmitPush(r0);
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitProperty(Property* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Property");

  { Reference property(this, node);
    property.GetValue();
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCall(Call* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Call");

  Expression* function = node->expression();
  ZoneList<Expression*>* args = node->arguments();

  // Standard function call.
  // Check if the function is a variable or a property.
  Variable* var = function->AsVariableProxy()->AsVariable();
  Property* property = function->AsProperty();

  // ------------------------------------------------------------------------
  // Fast-case: Use inline caching.
  // ---
  // According to ECMA-262, section 11.2.3, page 44, the function to call
  // must be resolved after the arguments have been evaluated. The IC code
  // automatically handles this by loading the arguments before the function
  // is resolved in cache misses (this also holds for megamorphic calls).
  // ------------------------------------------------------------------------

  if (var != NULL && var->is_possibly_eval()) {
    // ----------------------------------
    // JavaScript example: 'eval(arg)'  // eval is not known to be shadowed
    // ----------------------------------

    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the
    // call. Then we call the resolved function using the given
    // arguments.

    // Prepare stack for call to resolved function.
    Load(function);

    // Allocate a frame slot for the receiver.
    frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    VirtualFrame::SpilledScope spilled_scope(frame_);

    // If we know that eval can only be shadowed by eval-introduced
    // variables we attempt to load the global eval function directly
    // in generated code. If we succeed, there is no need to perform a
    // context lookup in the runtime system.
    JumpTarget done;
    if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
      ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
      JumpTarget slow;
      // Prepare the stack for the call to
      // ResolvePossiblyDirectEvalNoLookup by pushing the loaded
      // function, the first argument to the eval call and the
      // receiver.
      LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
                                        NOT_INSIDE_TYPEOF,
                                        &slow);
      frame_->EmitPush(r0);
      if (arg_count > 0) {
        __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
        frame_->EmitPush(r1);
      } else {
        frame_->EmitPush(r2);
      }
      __ ldr(r1, frame_->Receiver());
      frame_->EmitPush(r1);

      // Push the strict mode flag.
      frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));

      frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4);

      done.Jump();
      slow.Bind();
    }

    // Prepare the stack for the call to ResolvePossiblyDirectEval by
    // pushing the loaded function, the first argument to the eval
    // call and the receiver.
    __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize));
    frame_->EmitPush(r1);
    if (arg_count > 0) {
      __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
      frame_->EmitPush(r1);
    } else {
      frame_->EmitPush(r2);
    }
    __ ldr(r1, frame_->Receiver());
    frame_->EmitPush(r1);

    // Push the strict mode flag.
    frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));

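    // At this point, taking arg_count == 1 as an example, the stack holds,
    // from the top: the strict mode flag, the caller's receiver, a copy of
    // the first argument and a copy of the function (the four arguments to
    // the resolve runtime call), and below those the original argument, the
    // undefined receiver slot and the function itself.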
    // Resolve the call.
    frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);

    // If we generated fast-case code bind the jump-target where fast
    // and slow case merge.
    if (done.is_linked()) done.Bind();

    // Touch up stack with the right values for the function and the receiver.
    __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
    __ str(r1, MemOperand(sp, arg_count * kPointerSize));

    // Call the function.
    CodeForSourcePosition(node->position());

    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    CallFunctionStub call_function(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
    frame_->CallStub(&call_function, arg_count + 1);

    __ ldr(cp, frame_->Context());
    // Remove the function from the stack.
    frame_->Drop();
    frame_->EmitPush(r0);

  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is global
    // ----------------------------------
    // Pass the global object as the receiver and let the IC stub
    // patch the stack to use the global proxy as 'this' in the
    // invoked function.
    LoadGlobal();

    // Load the arguments.
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      Load(args->at(i));
    }

    VirtualFrame::SpilledScope spilled_scope(frame_);
    // Setup the name register and call the IC initialization code.
    __ mov(r2, Operand(var->name()));
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
    CodeForSourcePosition(node->position());
    frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
                           arg_count + 1);
    __ ldr(cp, frame_->Context());
    frame_->EmitPush(r0);

  } else if (var != NULL && var->AsSlot() != NULL &&
             var->AsSlot()->type() == Slot::LOOKUP) {
    // ----------------------------------
    // JavaScript examples:
    //
    //  with (obj) foo(1, 2, 3)  // foo may be in obj.
    //
    //  function f() {};
    //  function g() {
    //    eval(...);
    //    f();  // f could be in extension object.
    //  }
    // ----------------------------------

    JumpTarget slow, done;

    // Generate fast case for loading functions from slots that
    // correspond to local/global variables or arguments unless they
    // are shadowed by eval-introduced bindings.
    EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
                                    NOT_INSIDE_TYPEOF,
                                    &slow,
                                    &done);

    slow.Bind();
    // Load the function.
    frame_->EmitPush(cp);
    frame_->EmitPush(Operand(var->name()));
    frame_->CallRuntime(Runtime::kLoadContextSlot, 2);
    // r0: slot value; r1: receiver

    // Load the receiver.
    frame_->EmitPush(r0);  // function
    frame_->EmitPush(r1);  // receiver

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      JumpTarget call;
      call.Jump();
      done.Bind();
      frame_->EmitPush(r0);  // function
      LoadGlobalReceiver(VirtualFrame::scratch0());  // receiver
      call.Bind();
    }

    // Call the function. At this point, everything is spilled but the
    // function and receiver are in r0 and r1.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
    frame_->EmitPush(r0);

  } else if (property != NULL) {
    // Check if the key is a literal string.
    Literal* literal = property->key()->AsLiteral();

    if (literal != NULL && literal->handle()->IsSymbol()) {
      // ------------------------------------------------------------------
      // JavaScript example: 'object.foo(1, 2, 3)' or 'map["key"](1, 2, 3)'
      // ------------------------------------------------------------------

      Handle<String> name = Handle<String>::cast(literal->handle());

      if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
          name->IsEqualTo(CStrVector("apply")) &&
          args->length() == 2 &&
          args->at(1)->AsVariableProxy() != NULL &&
          args->at(1)->AsVariableProxy()->IsArguments()) {
        // Use the optimized Function.prototype.apply that avoids
        // allocating lazily allocated arguments objects.
        CallApplyLazy(property->obj(),
                      args->at(0),
                      args->at(1)->AsVariableProxy(),
                      node->position());

      } else {
        Load(property->obj());  // Receiver.
        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
        }

        VirtualFrame::SpilledScope spilled_scope(frame_);
        // Set the name register and call the IC initialization code.
        __ mov(r2, Operand(name));
        InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
        Handle<Code> stub =
            StubCache::ComputeCallInitialize(arg_count, in_loop);
        CodeForSourcePosition(node->position());
        frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
        __ ldr(cp, frame_->Context());
        frame_->EmitPush(r0);
      }

    } else {
      // -------------------------------------------
      // JavaScript example: 'array[index](1, 2, 3)'
      // -------------------------------------------

      // Load the receiver and name of the function.
      Load(property->obj());
      Load(property->key());

      if (property->is_synthetic()) {
        EmitKeyedLoad();
        // Put the function below the receiver.
        // Use the global receiver.
        frame_->EmitPush(r0);  // Function.
        LoadGlobalReceiver(VirtualFrame::scratch0());
        // Call the function.
        CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
        frame_->EmitPush(r0);
      } else {
        // Swap the name of the function and the receiver on the stack to
        // follow the calling convention for call ICs.
        Register key = frame_->PopToRegister();
        Register receiver = frame_->PopToRegister(key);
        frame_->EmitPush(key);
        frame_->EmitPush(receiver);

        // Load the arguments.
        int arg_count = args->length();
        for (int i = 0; i < arg_count; i++) {
          Load(args->at(i));
        }

        // Load the key into r2 and call the IC initialization code.
        InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
        Handle<Code> stub =
            StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
        CodeForSourcePosition(node->position());
        frame_->SpillAll();
        __ ldr(r2, frame_->ElementAt(arg_count + 1));
        frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
        frame_->Drop();  // Drop the key still on the stack.
        __ ldr(cp, frame_->Context());
        frame_->EmitPush(r0);
      }
    }

  } else {
    // ----------------------------------
    // JavaScript example: 'foo(1, 2, 3)'  // foo is not global
    // ----------------------------------

    // Load the function.
    Load(function);

    // Pass the global proxy as the receiver.
    LoadGlobalReceiver(VirtualFrame::scratch0());

    // Call the function.
    CallWithArguments(args, NO_CALL_FUNCTION_FLAGS, node->position());
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCallNew(CallNew* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CallNew");

  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments. This is different from ordinary calls, where the
  // actual function to call is resolved after the arguments have been
  // evaluated.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  Load(node->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = node->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  // Spill everything from here to simplify the implementation.
  VirtualFrame::SpilledScope spilled_scope(frame_);

  // Load the argument count into r0 and the function into r1 as per
  // calling convention.
  __ mov(r0, Operand(arg_count));
  __ ldr(r1, frame_->ElementAt(arg_count));

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  CodeForSourcePosition(node->position());
  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
  frame_->CallCodeObject(ic, RelocInfo::CONSTRUCT_CALL, arg_count + 1);
  frame_->EmitPush(r0);

  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
  Register scratch = VirtualFrame::scratch0();
  JumpTarget null, function, leave, non_function_constructor;

  // Load the object into register.
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register tos = frame_->PopToRegister();

  // If the object is a smi, we return null.
  __ tst(tos, Operand(kSmiTagMask));
  null.Branch(eq);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  __ CompareObjectType(tos, tos, scratch, FIRST_JS_OBJECT_TYPE);
  null.Branch(lt);

  // As long as JS_FUNCTION_TYPE is the last instance type and it is
  // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
  // LAST_JS_OBJECT_TYPE.
  STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
  STATIC_ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
  __ cmp(scratch, Operand(JS_FUNCTION_TYPE));
  function.Branch(eq);

  // Check if the constructor in the map is a function.
  __ ldr(tos, FieldMemOperand(tos, Map::kConstructorOffset));
  __ CompareObjectType(tos, scratch, scratch, JS_FUNCTION_TYPE);
  non_function_constructor.Branch(ne);

  // The tos register now contains the constructor function. Grab the
  // instance class name from there.
  __ ldr(tos, FieldMemOperand(tos, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(tos,
         FieldMemOperand(tos, SharedFunctionInfo::kInstanceClassNameOffset));
  frame_->EmitPush(tos);
  leave.Jump();

  // Functions have class 'Function'.
  function.Bind();
  __ mov(tos, Operand(Factory::function_class_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Objects with a non-function constructor have class 'Object'.
  non_function_constructor.Bind();
  __ mov(tos, Operand(Factory::Object_symbol()));
  frame_->EmitPush(tos);
  leave.Jump();

  // Non-JS objects have class null.
  null.Bind();
  __ LoadRoot(tos, Heap::kNullValueRootIndex);
  frame_->EmitPush(tos);

  // All done.
  leave.Bind();
}


4517void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004518 Register scratch = VirtualFrame::scratch0();
Steve Blocka7e24c12009-10-30 11:49:00 +00004519 JumpTarget leave;
Iain Merrick75681382010-08-19 15:07:18 +01004520
4521 ASSERT(args->length() == 1);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004522 Load(args->at(0));
Iain Merrick75681382010-08-19 15:07:18 +01004523 Register tos = frame_->PopToRegister(); // tos contains object.
Steve Blocka7e24c12009-10-30 11:49:00 +00004524 // if (object->IsSmi()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004525 __ tst(tos, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004526 leave.Branch(eq);
4527 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004528 __ CompareObjectType(tos, scratch, scratch, JS_VALUE_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004529 leave.Branch(ne);
4530 // Load the value.
Iain Merrick75681382010-08-19 15:07:18 +01004531 __ ldr(tos, FieldMemOperand(tos, JSValue::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004532 leave.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004533 frame_->EmitPush(tos);
Steve Blocka7e24c12009-10-30 11:49:00 +00004534}
4535
4536
4537void CodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
Iain Merrick75681382010-08-19 15:07:18 +01004538 Register scratch1 = VirtualFrame::scratch0();
4539 Register scratch2 = VirtualFrame::scratch1();
Steve Blocka7e24c12009-10-30 11:49:00 +00004540 JumpTarget leave;
Iain Merrick75681382010-08-19 15:07:18 +01004541
4542 ASSERT(args->length() == 2);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01004543 Load(args->at(0)); // Load the object.
4544 Load(args->at(1)); // Load the value.
Iain Merrick75681382010-08-19 15:07:18 +01004545 Register value = frame_->PopToRegister();
4546 Register object = frame_->PopToRegister(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004547 // if (object->IsSmi()) return object.
Iain Merrick75681382010-08-19 15:07:18 +01004548 __ tst(object, Operand(kSmiTagMask));
Steve Blocka7e24c12009-10-30 11:49:00 +00004549 leave.Branch(eq);
4550 // It is a heap object - get map. If (!object->IsJSValue()) return the object.
Iain Merrick75681382010-08-19 15:07:18 +01004551 __ CompareObjectType(object, scratch1, scratch1, JS_VALUE_TYPE);
Steve Blocka7e24c12009-10-30 11:49:00 +00004552 leave.Branch(ne);
4553 // Store the value.
Iain Merrick75681382010-08-19 15:07:18 +01004554 __ str(value, FieldMemOperand(object, JSValue::kValueOffset));
Steve Blocka7e24c12009-10-30 11:49:00 +00004555 // Update the write barrier.
Iain Merrick75681382010-08-19 15:07:18 +01004556 __ RecordWrite(object,
4557 Operand(JSValue::kValueOffset - kHeapObjectTag),
4558 scratch1,
4559 scratch2);
Steve Blocka7e24c12009-10-30 11:49:00 +00004560 // Leave.
4561 leave.Bind();
Iain Merrick75681382010-08-19 15:07:18 +01004562 frame_->EmitPush(value);
Steve Blocka7e24c12009-10-30 11:49:00 +00004563}
4564
4565
4566void CodeGenerator::GenerateIsSmi(ZoneList<Expression*>* args) {
Steve Blocka7e24c12009-10-30 11:49:00 +00004567 ASSERT(args->length() == 1);
Leon Clarkef7060e22010-06-03 12:02:55 +01004568 Load(args->at(0));
4569 Register reg = frame_->PopToRegister();
4570 __ tst(reg, Operand(kSmiTagMask));
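  // Smis are tagged with a zero low bit (kSmiTag == 0), so the tst above
  // sets the Z flag exactly when the value is a smi.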
  cc_reg_ = eq;
}


void CodeGenerator::GenerateLog(ZoneList<Expression*>* args) {
  // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc.
  ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (ShouldGenerateLog(args->at(0))) {
    Load(args->at(1));
    Load(args->at(2));
    frame_->CallRuntime(Runtime::kLog, 2);
  }
#endif
  frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register reg = frame_->PopToRegister();
  __ tst(reg, Operand(kSmiTagMask | 0x80000000u));
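  // A non-negative smi has both the tag bit and the sign bit clear, so one
  // tst against (kSmiTagMask | 0x80000000u) checks both conditions at once.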
  cc_reg_ = eq;
}


// Generates the Math.pow method.
void CodeGenerator::GenerateMathPow(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
  Load(args->at(0));
  Load(args->at(1));

  if (!CpuFeatures::IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;
    Label exponent_nonsmi, base_nonsmi, powi, not_minus_half, allocate_return;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get base and exponent to registers.
    Register exponent = frame_->PopToRegister();
    Register base = frame_->PopToRegister(exponent);
    Register heap_number_map = no_reg;

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    __ JumpIfNotSmi(exponent, &exponent_nonsmi);
    __ JumpIfNotSmi(base, &base_nonsmi);

    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Exponent is a smi and base is a smi. Get the smi value into vfp
    // register d1.
    __ SmiToDoubleVFPRegister(base, d1, scratch1, s0);
    __ b(&powi);

    __ bind(&base_nonsmi);
    // Exponent is smi and base is non smi. Get the double value from the
    // base into vfp register d1.
    __ ObjectToDoubleVFPRegister(base, d1,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    __ bind(&powi);

    // Load 1.0 into d0.
    __ vmov(d0, 1.0);

    // Get the absolute untagged value of the exponent and use that for the
    // calculation.
    __ mov(scratch1, Operand(exponent, ASR, kSmiTagSize), SetCC);
    // Negate if negative.
    __ rsb(scratch1, scratch1, Operand(0, RelocInfo::NONE), LeaveCC, mi);
    __ vmov(d2, d0, mi);  // 1.0 needed in d2 later if exponent is negative.

    // Run through all the bits in the exponent. The result is calculated in
    // d0, and d1 holds base^(2^i) for the bit i currently being processed.
    Label more_bits;
    __ bind(&more_bits);
    __ mov(scratch1, Operand(scratch1, LSR, 1), SetCC);
    __ vmul(d0, d0, d1, cs);  // Multiply into the result if bit i is set.
    __ vmul(d1, d1, d1, ne);  // Don't bother calculating next d1 if done.
    __ b(ne, &more_bits);
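    // Worked example for |exponent| == 5 (binary 101): the first shift drops
    // a 1, so d0 = 1.0 * base and d1 becomes base^2; the second drops a 0,
    // leaving d0 alone while d1 becomes base^4; the third drops a 1, so
    // d0 = base * base^4 = base^5, and the loop exits as scratch1 is now 0.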

    // If exponent is positive we are done.
    __ cmp(exponent, Operand(0, RelocInfo::NONE));
    __ b(ge, &allocate_return);

    // If exponent is negative the result is 1/result (d2 already holds 1.0
    // in that case). However if d0 has reached infinity this will not
    // provide the correct result, so call runtime if that is the case.
    __ mov(scratch2, Operand(0x7FF00000));
    __ mov(scratch1, Operand(0, RelocInfo::NONE));
    __ vmov(d1, scratch1, scratch2);  // Load infinity into d1.
    __ VFPCompareAndSetFlags(d0, d1);
    runtime.Branch(eq);  // d0 reached infinity.
    __ vdiv(d0, d2, d0);
    __ b(&allocate_return);

    __ bind(&exponent_nonsmi);
    // Special handling of raising to the power of -0.5 and 0.5. First check
    // that the exponent is a heap number whose lower mantissa bits are zero,
    // as they are for both of these values.
    heap_number_map = r6;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
    __ ldr(scratch1, FieldMemOperand(exponent, HeapObject::kMapOffset));
    __ ldr(scratch2, FieldMemOperand(exponent, HeapNumber::kMantissaOffset));
    __ cmp(scratch1, heap_number_map);
    runtime.Branch(ne);
    __ tst(scratch2, scratch2);
    runtime.Branch(ne);

    // Load the higher bits (which contain the floating point exponent).
    __ ldr(scratch1, FieldMemOperand(exponent, HeapNumber::kExponentOffset));

    // Compare exponent with -0.5.
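    // (0xbfe00000 is the upper word of -0.5 in IEEE-754 double format: sign
    // bit set, biased exponent 0x3fe, zero mantissa; 0.5 is 0x3fe00000.)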
    __ cmp(scratch1, Operand(0xbfe00000));
    __ b(ne, &not_minus_half);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);

    // Convert -0 into +0 by adding +0.
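    // (In IEEE-754 arithmetic (-0) + (+0) is +0, whereas sqrt(-0) is -0;
    // normalizing here keeps pow(-0, -0.5) from yielding -Infinity.)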
    __ vmov(d2, 0.0);
    __ vadd(d0, d2, d0);
    // Load 1.0 into d2.
    __ vmov(d2, 1.0);

    // Calculate the reciprocal of the square root.
    __ vsqrt(d0, d0);
    __ vdiv(d0, d2, d0);

    __ b(&allocate_return);

    __ bind(&not_minus_half);
    // Compare exponent with 0.5.
    __ cmp(scratch1, Operand(0x3fe00000));
    runtime.Branch(ne);

    // Get the double value from the base into vfp register d0.
    __ ObjectToDoubleVFPRegister(base, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label(),
                                 AVOID_NANS_AND_INFINITIES);
    // Convert -0 into +0 by adding +0.
    __ vmov(d2, 0.0);
    __ vadd(d0, d2, d0);
    __ vsqrt(d0, d0);

    __ bind(&allocate_return);
    Register scratch3 = r5;
    __ AllocateHeapNumberWithValue(scratch3, d0, scratch1, scratch2,
                                   heap_number_map, runtime.entry_label());
    __ mov(base, scratch3);
    done.Jump();

    runtime.Bind();

    // Push back the arguments again for the runtime call.
    frame_->EmitPush(base);
    frame_->EmitPush(exponent);
    frame_->CallRuntime(Runtime::kMath_pow, 2);
    __ Move(base, r0);

    done.Bind();
    frame_->EmitPush(base);
  }
}


// Generates the Math.sqrt method.
void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));

  if (!CpuFeatures::IsSupported(VFP3)) {
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    frame_->EmitPush(r0);
  } else {
    CpuFeatures::Scope scope(VFP3);
    JumpTarget runtime, done;

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();

    // Get the value from the frame.
    Register tos = frame_->PopToRegister();

    // Set the frame for the runtime jump target. The code below jumps to the
    // jump target label so the frame needs to be established before that.
    ASSERT(runtime.entry_frame() == NULL);
    runtime.set_entry_frame(frame_);

    Register heap_number_map = r6;
    Register new_heap_number = r5;
    __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

    // Get the double value from the heap number into vfp register d0.
    __ ObjectToDoubleVFPRegister(tos, d0,
                                 scratch1, scratch2, heap_number_map, s0,
                                 runtime.entry_label());

    // Calculate the square root of d0 and place result in a heap number object.
    __ vsqrt(d0, d0);
    __ AllocateHeapNumberWithValue(new_heap_number,
                                   d0,
                                   scratch1, scratch2,
                                   heap_number_map,
                                   runtime.entry_label());
    __ mov(tos, Operand(new_heap_number));
    done.Jump();

    runtime.Bind();
    // Push back the argument again for the runtime call.
    frame_->EmitPush(tos);
    frame_->CallRuntime(Runtime::kMath_sqrt, 1);
    __ Move(tos, r0);

    done.Bind();
    frame_->EmitPush(tos);
  }
}


class DeferredStringCharCodeAt : public DeferredCode {
 public:
  DeferredStringCharCodeAt(Register object,
                           Register index,
                           Register scratch,
                           Register result)
      : result_(result),
        char_code_at_generator_(object,
                                index,
                                scratch,
                                result,
                                &need_conversion_,
                                &need_conversion_,
                                &index_out_of_range_,
                                STRING_INDEX_IS_NUMBER) {}

  StringCharCodeAtGenerator* fast_case_generator() {
    return &char_code_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_code_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move the undefined value into the result register, which will
    // trigger conversion.
    __ LoadRoot(result_, Heap::kUndefinedValueRootIndex);
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // NaN.
    __ LoadRoot(result_, Heap::kNanValueRootIndex);
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharCodeAtGenerator char_code_at_generator_;
};


// This generates code that performs a String.prototype.charCodeAt() call
// or returns a smi in order to trigger conversion.
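// For example, '"abc".charCodeAt(1)' yields 98; for an out-of-range index
// the deferred code above loads the NaN root as the result instead.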
void CodeGenerator::GenerateStringCharCodeAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharCodeAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  Register index = frame_->PopToRegister();
  Register object = frame_->PopToRegister(index);

  // We need two extra registers.
  Register scratch = VirtualFrame::scratch0();
  Register result = VirtualFrame::scratch1();

  DeferredStringCharCodeAt* deferred =
      new DeferredStringCharCodeAt(object,
                                   index,
                                   scratch,
                                   result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


class DeferredStringCharFromCode : public DeferredCode {
 public:
  DeferredStringCharFromCode(Register code,
                             Register result)
      : char_from_code_generator_(code, result) {}

  StringCharFromCodeGenerator* fast_case_generator() {
    return &char_from_code_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_from_code_generator_.GenerateSlow(masm(), call_helper);
  }

 private:
  StringCharFromCodeGenerator char_from_code_generator_;
};


// Generates code for creating a one-char string from a char code.
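// For example, 'String.fromCharCode(97)' yields the one-character string "a".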
void CodeGenerator::GenerateStringCharFromCode(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharFromCode");
  ASSERT(args->length() == 1);

  Load(args->at(0));

  Register result = frame_->GetTOSRegister();
  Register code = frame_->PopToRegister(result);

  DeferredStringCharFromCode* deferred = new DeferredStringCharFromCode(
      code, result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


class DeferredStringCharAt : public DeferredCode {
 public:
  DeferredStringCharAt(Register object,
                       Register index,
                       Register scratch1,
                       Register scratch2,
                       Register result)
      : result_(result),
        char_at_generator_(object,
                           index,
                           scratch1,
                           scratch2,
                           result,
                           &need_conversion_,
                           &need_conversion_,
                           &index_out_of_range_,
                           STRING_INDEX_IS_NUMBER) {}

  StringCharAtGenerator* fast_case_generator() {
    return &char_at_generator_;
  }

  virtual void Generate() {
    VirtualFrameRuntimeCallHelper call_helper(frame_state());
    char_at_generator_.GenerateSlow(masm(), call_helper);

    __ bind(&need_conversion_);
    // Move smi zero into the result register, which will trigger
    // conversion.
    __ mov(result_, Operand(Smi::FromInt(0)));
    __ jmp(exit_label());

    __ bind(&index_out_of_range_);
    // When the index is out of range, the spec requires us to return
    // the empty string.
    __ LoadRoot(result_, Heap::kEmptyStringRootIndex);
    __ jmp(exit_label());
  }

 private:
  Register result_;

  Label need_conversion_;
  Label index_out_of_range_;

  StringCharAtGenerator char_at_generator_;
};


// This generates code that performs a String.prototype.charAt() call
// or returns a smi in order to trigger conversion.
void CodeGenerator::GenerateStringCharAt(ZoneList<Expression*>* args) {
  Comment(masm_, "[ GenerateStringCharAt");
  ASSERT(args->length() == 2);

  Load(args->at(0));
  Load(args->at(1));

  Register index = frame_->PopToRegister();
  Register object = frame_->PopToRegister(index);

  // We need three extra registers.
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  // Use r6 without notifying the virtual frame.
  Register result = r6;

  DeferredStringCharAt* deferred =
      new DeferredStringCharAt(object,
                               index,
                               scratch1,
                               scratch2,
                               result);
  deferred->fast_case_generator()->GenerateFast(masm_);
  deferred->BindExit();
  frame_->EmitPush(result);
}


void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  JumpTarget answer;
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi. This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
  Register possible_array = frame_->PopToRegister();
  Register scratch = VirtualFrame::scratch0();
  __ and_(scratch, possible_array, Operand(kSmiTagMask));
  __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
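  // Spelled out: a smi has a clear low bit, so the and_ gives 0 and the eor
  // gives 1, reading as not_equal (not an array); a heap object has the bit
  // set, so the eor gives 0 and control falls through to the map check.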
  answer.Branch(ne);
  // It is a heap object - get the map. Check if the object is a JS array.
  __ CompareObjectType(possible_array, scratch, scratch, JS_ARRAY_TYPE);
  answer.Bind();
  cc_reg_ = eq;
}


void CodeGenerator::GenerateIsRegExp(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  JumpTarget answer;
  // We need the CC bits to come out as not_equal in the case where the
  // object is a smi. This can't be done with the usual test opcode so
  // we use XOR to get the right CC bits.
  Register possible_regexp = frame_->PopToRegister();
  Register scratch = VirtualFrame::scratch0();
  __ and_(scratch, possible_regexp, Operand(kSmiTagMask));
  __ eor(scratch, scratch, Operand(kSmiTagMask), SetCC);
  answer.Branch(ne);
  // It is a heap object - get the map. Check if the object is a regexp.
  __ CompareObjectType(possible_regexp, scratch, scratch, JS_REGEXP_TYPE);
  answer.Bind();
  cc_reg_ = eq;
}


void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_object = frame_->PopToRegister();
  __ tst(possible_object, Operand(kSmiTagMask));
  false_target()->Branch(eq);

  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(possible_object, ip);
  true_target()->Branch(eq);

  Register map_reg = VirtualFrame::scratch0();
  __ ldr(map_reg, FieldMemOperand(possible_object, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kBitFieldOffset));
  __ tst(possible_object, Operand(1 << Map::kIsUndetectable));
  false_target()->Branch(ne);

  __ ldrb(possible_object, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
  __ cmp(possible_object, Operand(FIRST_JS_OBJECT_TYPE));
  false_target()->Branch(lt);
  __ cmp(possible_object, Operand(LAST_JS_OBJECT_TYPE));
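  // le below: FIRST_JS_OBJECT_TYPE <= instance type <= LAST_JS_OBJECT_TYPE.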
  cc_reg_ = le;
}


void CodeGenerator::GenerateIsSpecObject(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp' ||
  // typeof(arg) === 'function').
  // It includes undetectable objects (as opposed to IsObject).
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register value = frame_->PopToRegister();
  __ tst(value, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  // Check that this is an object.
  __ ldr(value, FieldMemOperand(value, HeapObject::kMapOffset));
  __ ldrb(value, FieldMemOperand(value, Map::kInstanceTypeOffset));
  __ cmp(value, Operand(FIRST_JS_OBJECT_TYPE));
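  // ge accepts every instance type from FIRST_JS_OBJECT_TYPE upwards,
  // which (per the comment above) also covers function objects.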
  cc_reg_ = ge;
}


// Deferred code to check whether a String wrapper object is safe to use the
// default valueOf behavior on.  This code is called after the bit caching
// this information in the map has been checked, with the object's map in
// the map_result_ register.  On return map_result_ contains 1 for true and
// 0 for false.
class DeferredIsStringWrapperSafeForDefaultValueOf : public DeferredCode {
 public:
  DeferredIsStringWrapperSafeForDefaultValueOf(Register object,
                                               Register map_result,
                                               Register scratch1,
                                               Register scratch2)
      : object_(object),
        map_result_(map_result),
        scratch1_(scratch1),
        scratch2_(scratch2) { }

  virtual void Generate() {
    Label false_result;

    // Check that map is loaded as expected.
    if (FLAG_debug_code) {
      __ ldr(ip, FieldMemOperand(object_, HeapObject::kMapOffset));
      __ cmp(map_result_, ip);
      __ Assert(eq, "Map not in expected register");
    }

    // Check for fast case object. Generate false result for slow case object.
    __ ldr(scratch1_, FieldMemOperand(object_, JSObject::kPropertiesOffset));
    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
    __ cmp(scratch1_, ip);
    __ b(eq, &false_result);

    // Look for valueOf symbol in the descriptor array, and indicate false if
    // found. The type is not checked, so if it is a transition it is a false
    // negative.
    __ ldr(map_result_,
           FieldMemOperand(map_result_, Map::kInstanceDescriptorsOffset));
    __ ldr(scratch2_, FieldMemOperand(map_result_, FixedArray::kLengthOffset));
    // map_result_: descriptor array
    // scratch2_: length of descriptor array
    // Calculate the end of the descriptor array.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagSize == 1);
    STATIC_ASSERT(kPointerSize == 4);
    __ add(scratch1_,
           map_result_,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ add(scratch1_,
           scratch1_,
           Operand(scratch2_, LSL, kPointerSizeLog2 - kSmiTagSize));
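    // scratch1_ now points just past the last descriptor entry.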

    // Calculate location of the first key name.
    __ add(map_result_,
           map_result_,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag +
                   DescriptorArray::kFirstIndex * kPointerSize));
    // Loop through all the keys in the descriptor array. If one of these is
    // the symbol valueOf, the result is false.
    Label entry, loop;
    // The use of ip to store the valueOf symbol assumes that it is not
    // otherwise used in the loop below.
    __ mov(ip, Operand(Factory::value_of_symbol()));
    __ jmp(&entry);
    __ bind(&loop);
    __ ldr(scratch2_, MemOperand(map_result_, 0));
    __ cmp(scratch2_, ip);
    __ b(eq, &false_result);
    __ add(map_result_, map_result_, Operand(kPointerSize));
    __ bind(&entry);
    __ cmp(map_result_, Operand(scratch1_));
    __ b(ne, &loop);
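    // Fell through: no valueOf symbol among the descriptor keys.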

    // Reload map as register map_result_ was used as temporary above.
    __ ldr(map_result_, FieldMemOperand(object_, HeapObject::kMapOffset));

    // If a valueOf property is not found on the object, check that its
    // prototype is the unmodified String prototype. If not, the result is
    // false.
    __ ldr(scratch1_, FieldMemOperand(map_result_, Map::kPrototypeOffset));
    __ tst(scratch1_, Operand(kSmiTagMask));
    __ b(eq, &false_result);
    __ ldr(scratch1_, FieldMemOperand(scratch1_, HeapObject::kMapOffset));
    __ ldr(scratch2_,
           ContextOperand(cp, Context::GLOBAL_INDEX));
    __ ldr(scratch2_,
           FieldMemOperand(scratch2_, GlobalObject::kGlobalContextOffset));
    __ ldr(scratch2_,
           ContextOperand(
               scratch2_, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
    __ cmp(scratch1_, scratch2_);
    __ b(ne, &false_result);

    // Set the bit in the map to indicate that it has been checked safe for
    // default valueOf and set true result.
    __ ldrb(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
    __ orr(scratch1_,
           scratch1_,
           Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
    __ strb(scratch1_, FieldMemOperand(map_result_, Map::kBitField2Offset));
    __ mov(map_result_, Operand(1));
    __ jmp(exit_label());
    __ bind(&false_result);
    // Set false result.
    __ mov(map_result_, Operand(0, RelocInfo::NONE));
  }

 private:
  Register object_;
  Register map_result_;
  Register scratch1_;
  Register scratch2_;
};


void CodeGenerator::GenerateIsStringWrapperSafeForDefaultValueOf(
    ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register obj = frame_->PopToRegister();  // Pop the string wrapper.
  if (FLAG_debug_code) {
    __ AbortIfSmi(obj);
  }

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  Register map_result = VirtualFrame::scratch0();
  __ ldr(map_result, FieldMemOperand(obj, HeapObject::kMapOffset));
  __ ldrb(ip, FieldMemOperand(map_result, Map::kBitField2Offset));
  __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
  true_target()->Branch(ne);

  // We need an additional two scratch registers for the deferred code.
  Register scratch1 = VirtualFrame::scratch1();
  // Use r6 without notifying the virtual frame.
  Register scratch2 = r6;

  DeferredIsStringWrapperSafeForDefaultValueOf* deferred =
      new DeferredIsStringWrapperSafeForDefaultValueOf(
          obj, map_result, scratch1, scratch2);
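  // No flag-setting instruction has been emitted since the tst above, so
  // eq here still means the safe-for-default-valueOf bit was not set.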
  deferred->Branch(eq);
  deferred->BindExit();
  __ tst(map_result, Operand(map_result));
  cc_reg_ = ne;
}


void CodeGenerator::GenerateIsFunction(ZoneList<Expression*>* args) {
  // This generates a fast version of:
  // (%_ClassOf(arg) === 'Function')
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_function = frame_->PopToRegister();
  __ tst(possible_function, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  Register map_reg = VirtualFrame::scratch0();
  Register scratch = VirtualFrame::scratch1();
  __ CompareObjectType(possible_function, map_reg, scratch, JS_FUNCTION_TYPE);
  cc_reg_ = eq;
}


void CodeGenerator::GenerateIsUndetectableObject(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register possible_undetectable = frame_->PopToRegister();
  __ tst(possible_undetectable, Operand(kSmiTagMask));
  false_target()->Branch(eq);
  Register scratch = VirtualFrame::scratch0();
  __ ldr(scratch,
         FieldMemOperand(possible_undetectable, HeapObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
  __ tst(scratch, Operand(1 << Map::kIsUndetectable));
  cc_reg_ = ne;
}


void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Register scratch0 = VirtualFrame::scratch0();
  Register scratch1 = VirtualFrame::scratch1();
  // Get the frame pointer for the calling frame.
  __ ldr(scratch0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kContextOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
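  // The next load is predicated on eq, so it only executes when the caller
  // really is an arguments adaptor frame.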
  __ ldr(scratch0,
         MemOperand(scratch0, StandardFrameConstants::kCallerFPOffset), eq);

  // Check the marker in the calling frame.
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kMarkerOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
  cc_reg_ = eq;
}


void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 0);

  Register tos = frame_->GetTOSRegister();
  Register scratch0 = VirtualFrame::scratch0();
  Register scratch1 = VirtualFrame::scratch1();

  // Check if the calling frame is an arguments adaptor frame.
  __ ldr(scratch0,
         MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch1,
         MemOperand(scratch0, StandardFrameConstants::kContextOffset));
  __ cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Get the number of formal parameters.
  __ mov(tos, Operand(Smi::FromInt(scope()->num_parameters())), LeaveCC, ne);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ldr(tos,
         MemOperand(scratch0, ArgumentsAdaptorFrameConstants::kLengthOffset),
         eq);
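  // Exactly one of the two predicated instructions above (the ne mov or
  // the eq ldr) has executed, so tos holds the correct length either way.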

  frame_->EmitPush(tos);
}


void CodeGenerator::GenerateArguments(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  // Satisfy contract with ArgumentsAccessStub:
  // Load the key into r1 and the formal parameters count into r0.
  Load(args->at(0));
  frame_->PopToR1();
  frame_->SpillAll();
  __ mov(r0, Operand(Smi::FromInt(scope()->num_parameters())));

  // Call the shared stub to get to arguments[key].
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  frame_->CallStub(&stub, 0);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateRandomHeapNumber(
    ZoneList<Expression*>* args) {
  VirtualFrame::SpilledScope spilled_scope(frame_);
  ASSERT(args->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
  __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(r4, Operand(r0));

  __ bind(&heapnumber_allocated);

  // Convert 32 random bits in r0 to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
  if (CpuFeatures::IsSupported(VFP3)) {
    __ PrepareCallCFunction(0, r1);
    __ CallCFunction(ExternalReference::random_uint32_function(), 0);

    CpuFeatures::Scope scope(VFP3);
    // 0x41300000 is the top half of 1.0 x 2^20 as a double.
    // Create this constant using mov/orr to avoid PC relative load.
    __ mov(r1, Operand(0x41000000));
    __ orr(r1, r1, Operand(0x300000));
    // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
    __ vmov(d7, r0, r1);
    // Move 0x4130000000000000 to VFP.
    __ mov(r0, Operand(0, RelocInfo::NONE));
    __ vmov(d8, r0, r1);
    // Subtract and store the result in the heap number.
    __ vsub(d7, d7, d8);
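    // d7 held (1 + r/2^52) * 2^20 and d8 held 1.0 * 2^20, so d7 is now
    // r/2^32: a uniformly distributed value in [0, 1).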
    __ sub(r0, r4, Operand(kHeapObjectTag));
    __ vstr(d7, r0, HeapNumber::kValueOffset);
    frame_->EmitPush(r4);
  } else {
    __ mov(r0, Operand(r4));
    __ PrepareCallCFunction(1, r1);
    __ CallCFunction(
        ExternalReference::fill_heap_number_with_random_function(), 1);
    frame_->EmitPush(r0);
  }
}


void CodeGenerator::GenerateStringAdd(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  frame_->SpillAll();
  frame_->CallStub(&stub, 2);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateSubString(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  SubStringStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 3);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateStringCompare(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Load(args->at(0));
  Load(args->at(1));

  StringCompareStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 2);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateRegExpExec(ZoneList<Expression*>* args) {
  ASSERT_EQ(4, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));
  Load(args->at(3));
  RegExpExecStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 4);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
  ASSERT_EQ(3, args->length());

  Load(args->at(0));  // Size of array, smi.
  Load(args->at(1));  // "index" property value.
  Load(args->at(2));  // "input" property value.
  RegExpConstructResultStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 3);
  frame_->EmitPush(r0);
}


class DeferredSearchCache: public DeferredCode {
 public:
  DeferredSearchCache(Register dst, Register cache, Register key)
      : dst_(dst), cache_(cache), key_(key) {
    set_comment("[ DeferredSearchCache");
  }

  virtual void Generate();

 private:
  Register dst_, cache_, key_;
};


void DeferredSearchCache::Generate() {
  __ Push(cache_, key_);
  __ CallRuntime(Runtime::kGetFromCache, 2);
  __ Move(dst_, r0);
}


void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      Top::global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
    return;
  }

  Load(args->at(1));

  frame_->PopToR1();
  frame_->SpillAll();
  Register key = r1;  // Just popped to r1.
  Register result = r0;  // Free, as frame has just been spilled.
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();

  __ ldr(scratch1, ContextOperand(cp, Context::GLOBAL_INDEX));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, GlobalObject::kGlobalContextOffset));
  __ ldr(scratch1,
         ContextOperand(scratch1, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, FixedArray::OffsetOfElementAt(cache_id)));

  DeferredSearchCache* deferred =
      new DeferredSearchCache(result, scratch1, key);

  const int kFingerOffset =
      FixedArray::OffsetOfElementAt(JSFunctionResultCache::kFingerIndex);
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ ldr(result, FieldMemOperand(scratch1, kFingerOffset));
  // result now holds finger offset as a smi.
  __ add(scratch2, scratch1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // scratch2 now points to the start of fixed array elements.
  __ ldr(result,
         MemOperand(
             scratch2, result, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
  // Note side effect of PreIndex: scratch2 now points to the key of the pair.
  __ cmp(key, result);
  deferred->Branch(ne);

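  // Cache hit: the cached value sits one word after the matching key.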
  __ ldr(result, MemOperand(scratch2, kPointerSize));

  deferred->BindExit();
  frame_->EmitPush(result);
}


void CodeGenerator::GenerateNumberToString(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  Load(args->at(0));

  NumberToStringStub stub;
  frame_->SpillAll();
  frame_->CallStub(&stub, 1);
  frame_->EmitPush(r0);
}


class DeferredSwapElements: public DeferredCode {
 public:
  DeferredSwapElements(Register object, Register index1, Register index2)
      : object_(object), index1_(index1), index2_(index2) {
    set_comment("[ DeferredSwapElements");
  }

  virtual void Generate();

 private:
  Register object_, index1_, index2_;
};


void DeferredSwapElements::Generate() {
  __ push(object_);
  __ push(index1_);
  __ push(index2_);
  __ CallRuntime(Runtime::kSwapElements, 3);
}


void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateSwapElements");

  ASSERT_EQ(3, args->length());

  Load(args->at(0));
  Load(args->at(1));
  Load(args->at(2));

  VirtualFrame::SpilledScope spilled_scope(frame_);

  Register index2 = r2;
  Register index1 = r1;
  Register object = r0;
  Register tmp1 = r3;
  Register tmp2 = r4;

  frame_->EmitPop(index2);
  frame_->EmitPop(index1);
  frame_->EmitPop(object);

  DeferredSwapElements* deferred =
      new DeferredSwapElements(object, index1, index2);

  // Fetch the map and check if array is in fast case.
  // Check that object doesn't require security checks and
  // has no indexed interceptor.
  __ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
  deferred->Branch(lt);
  __ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
  __ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
  deferred->Branch(ne);

  // Check the object's elements are in fast case and writable.
  __ ldr(tmp1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ ldr(tmp2, FieldMemOperand(tmp1, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(tmp2, ip);
  deferred->Branch(ne);

  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);

  // Check that both indices are smis.
  __ mov(tmp2, index1);
  __ orr(tmp2, tmp2, index2);
  __ tst(tmp2, Operand(kSmiTagMask));
  deferred->Branch(ne);

  // Check that both indices are valid.
  __ ldr(tmp2, FieldMemOperand(object, JSArray::kLengthOffset));
  __ cmp(tmp2, index1);
  __ cmp(tmp2, index2, hi);
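  // The second cmp only executed if length > index1 (hi), so ls below
  // means at least one of the indices is out of range.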
  deferred->Branch(ls);

  // Bring the offsets into the fixed array in tmp1 into index1 and
  // index2.
  __ mov(tmp2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(index1, tmp2, Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(index2, tmp2, Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Swap elements.
  Register tmp3 = object;
  object = no_reg;
  __ ldr(tmp3, MemOperand(tmp1, index1));
  __ ldr(tmp2, MemOperand(tmp1, index2));
  __ str(tmp3, MemOperand(tmp1, index2));
  __ str(tmp2, MemOperand(tmp1, index1));

  Label done;
  __ InNewSpace(tmp1, tmp2, eq, &done);
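  // Reached only when the elements array is not in new space; stores into
  // new-space objects do not need a remembered-set update.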
  // Possible optimization: do a check that both values are Smis
  // (or them and test against Smi mask.)

  __ mov(tmp2, tmp1);
  __ add(index1, index1, tmp1);
  __ add(index2, index2, tmp1);
  __ RecordWriteHelper(tmp1, index1, tmp3);
  __ RecordWriteHelper(tmp2, index2, tmp3);
  __ bind(&done);

  deferred->BindExit();
  __ LoadRoot(tmp1, Heap::kUndefinedValueRootIndex);
  frame_->EmitPush(tmp1);
}


void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
  Comment cmnt(masm_, "[ GenerateCallFunction");

  ASSERT(args->length() >= 2);

  int n_args = args->length() - 2;  // for receiver and function.
  Load(args->at(0));  // receiver
  for (int i = 0; i < n_args; i++) {
    Load(args->at(i + 1));
  }
  Load(args->at(n_args + 1));  // function
  frame_->CallJSFunction(n_args);
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                 TranscendentalCacheStub::TAGGED);
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_sin, 1);
  }
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::COS,
                                 TranscendentalCacheStub::TAGGED);
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_cos, 1);
  }
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateMathLog(ZoneList<Expression*>* args) {
  ASSERT_EQ(args->length(), 1);
  Load(args->at(0));
  if (CpuFeatures::IsSupported(VFP3)) {
    TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                 TranscendentalCacheStub::TAGGED);
    frame_->SpillAllButCopyTOSToR0();
    frame_->CallStub(&stub, 1);
  } else {
    frame_->CallRuntime(Runtime::kMath_log, 1);
  }
  frame_->EmitPush(r0);
}


void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Register lhs = frame_->PopToRegister();
  Register rhs = frame_->PopToRegister(lhs);
  __ cmp(lhs, rhs);
  cc_reg_ = eq;
}


void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  Load(args->at(0));
  Load(args->at(1));
  Register right = frame_->PopToRegister();
  Register left = frame_->PopToRegister(right);
  Register tmp = frame_->scratch0();
  Register tmp2 = frame_->scratch1();

  // Jumps to done must have the eq flag set if the test is successful
  // and clear if the test has failed.
  Label done;

  // Fail if either is a non-HeapObject.
  __ cmp(left, Operand(right));
  __ b(eq, &done);
  __ and_(tmp, left, Operand(right));
  __ eor(tmp, tmp, Operand(kSmiTagMask));
  __ tst(tmp, Operand(kSmiTagMask));
  __ b(ne, &done);
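  // Both operands are heap objects from here on.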
  __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
  __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
  __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
  __ b(ne, &done);
  __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
  __ cmp(tmp, Operand(tmp2));
  __ b(ne, &done);
  __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
  __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
  __ cmp(tmp, tmp2);
  __ bind(&done);
  cc_reg_ = eq;
}


void CodeGenerator::GenerateHasCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register value = frame_->PopToRegister();
  Register tmp = frame_->scratch0();
  __ ldr(tmp, FieldMemOperand(value, String::kHashFieldOffset));
  __ tst(tmp, Operand(String::kContainsCachedArrayIndexMask));
  cc_reg_ = eq;
}


void CodeGenerator::GenerateGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  Load(args->at(0));
  Register value = frame_->PopToRegister();

  __ ldr(value, FieldMemOperand(value, String::kHashFieldOffset));
  __ IndexFromHash(value, value);
  frame_->EmitPush(value);
}


void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 2);
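  // Not implemented inline here: always produce undefined, which the
  // JavaScript caller is expected to treat as "fast case unavailable" and
  // fall back on the generic Array.prototype.join.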
  Load(args->at(0));
  Register value = frame_->PopToRegister();
  __ LoadRoot(value, Heap::kUndefinedValueRootIndex);
  frame_->EmitPush(value);
}


void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  if (CheckForInlineRuntimeCall(node)) {
    ASSERT((has_cc() && frame_->height() == original_height) ||
           (!has_cc() && frame_->height() == original_height + 1));
    return;
  }

  ZoneList<Expression*>* args = node->arguments();
  Comment cmnt(masm_, "[ CallRuntime");
  Runtime::Function* function = node->function();

  if (function == NULL) {
    // Prepare stack for calling JS runtime function.
    // Push the builtins object found in the current global object.
    Register scratch = VirtualFrame::scratch0();
    __ ldr(scratch, GlobalObjectOperand());
    Register builtins = frame_->GetTOSRegister();
    __ ldr(builtins, FieldMemOperand(scratch, GlobalObject::kBuiltinsOffset));
    frame_->EmitPush(builtins);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    Load(args->at(i));
  }

  VirtualFrame::SpilledScope spilled_scope(frame_);

  if (function == NULL) {
    // Call the JS runtime function.
    __ mov(r2, Operand(node->name()));
    InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> stub = StubCache::ComputeCallInitialize(arg_count, in_loop);
    frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET, arg_count + 1);
    __ ldr(cp, frame_->Context());
    frame_->EmitPush(r0);
  } else {
    // Call the C runtime function.
    frame_->CallRuntime(function, arg_count);
    frame_->EmitPush(r0);
  }
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ UnaryOperation");

  Token::Value op = node->op();

  if (op == Token::NOT) {
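    // The true and false targets are deliberately swapped: jumping code
    // paths take the negation directly, and a materialized condition code
    // is negated just below.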
    LoadCondition(node->expression(), false_target(), true_target(), true);
    // LoadCondition may (and usually does) leave a test and branch to
    // be emitted by the caller. In that case, negate the condition.
    if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);

  } else if (op == Token::DELETE) {
    Property* property = node->expression()->AsProperty();
    Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
    if (property != NULL) {
      Load(property->obj());
      Load(property->key());
      frame_->EmitPush(Operand(Smi::FromInt(strict_mode_flag())));
      frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 3);
      frame_->EmitPush(r0);

    } else if (variable != NULL) {
      // Delete of an unqualified identifier is disallowed in strict mode
      // but "delete this" is.
      ASSERT(strict_mode_flag() == kNonStrictMode || variable->is_this());
      Slot* slot = variable->AsSlot();
      if (variable->is_global()) {
        LoadGlobal();
        frame_->EmitPush(Operand(variable->name()));
        frame_->EmitPush(Operand(Smi::FromInt(kNonStrictMode)));
        frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, 3);
        frame_->EmitPush(r0);

      } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
        // Delete from the context holding the named variable.
        frame_->EmitPush(cp);
        frame_->EmitPush(Operand(variable->name()));
        frame_->CallRuntime(Runtime::kDeleteContextSlot, 2);
        frame_->EmitPush(r0);

      } else {
        // Default: Result of deleting non-global, not dynamically
        // introduced variables is false.
        frame_->EmitPushRoot(Heap::kFalseValueRootIndex);
      }

    } else {
      // Default: Result of deleting expressions is true.
      Load(node->expression());  // may have side-effects
      frame_->Drop();
      frame_->EmitPushRoot(Heap::kTrueValueRootIndex);
    }

  } else if (op == Token::TYPEOF) {
    // Special case for loading the typeof expression; see comment on
    // LoadTypeofExpression().
    LoadTypeofExpression(node->expression());
    frame_->CallRuntime(Runtime::kTypeof, 1);
    frame_->EmitPush(r0);  // r0 has result

  } else {
    bool can_overwrite = node->expression()->ResultOverwriteAllowed();
    UnaryOverwriteMode overwrite =
        can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;

    bool no_negative_zero = node->expression()->no_negative_zero();
    Load(node->expression());
    switch (op) {
      case Token::NOT:
      case Token::DELETE:
      case Token::TYPEOF:
        UNREACHABLE();  // handled above
        break;

      case Token::SUB: {
        frame_->PopToR0();
        GenericUnaryOpStub stub(
            Token::SUB,
            overwrite,
            NO_UNARY_FLAGS,
            no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
        frame_->CallStub(&stub, 0);
        frame_->EmitPush(r0);  // r0 has result
        break;
      }

      case Token::BIT_NOT: {
        Register tos = frame_->PopToRegister();
        JumpTarget not_smi_label;
        JumpTarget continue_label;
        // Smi check.
        __ tst(tos, Operand(kSmiTagMask));
        not_smi_label.Branch(ne);

        __ mvn(tos, Operand(tos));
        __ bic(tos, tos, Operand(kSmiTagMask));  // Bit-clear inverted smi-tag.
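        // mvn flipped the (zero) smi tag bit along with the payload; bic
        // restores a valid smi encoding, so tos now holds Smi(~x).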
        frame_->EmitPush(tos);
        // The fast case is the first to jump to the continue label, so it gets
        // to decide the virtual frame layout.
        continue_label.Jump();

        not_smi_label.Bind();
        frame_->SpillAll();
        __ Move(r0, tos);
        GenericUnaryOpStub stub(Token::BIT_NOT,
                                overwrite,
                                NO_UNARY_SMI_CODE_IN_STUB);
        frame_->CallStub(&stub, 0);
        frame_->EmitPush(r0);

        continue_label.Bind();
        break;
      }

      case Token::VOID:
        frame_->Drop();
        frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
        break;

      case Token::ADD: {
        Register tos = frame_->Peek();
        // Smi check.
        JumpTarget continue_label;
        __ tst(tos, Operand(kSmiTagMask));
        continue_label.Branch(eq);

        frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
        frame_->EmitPush(r0);

        continue_label.Bind();
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


class DeferredCountOperation: public DeferredCode {
 public:
  DeferredCountOperation(Register value,
                         bool is_increment,
                         bool is_postfix,
                         int target_size)
      : value_(value),
        is_increment_(is_increment),
        is_postfix_(is_postfix),
        target_size_(target_size) {}

  virtual void Generate() {
    VirtualFrame copied_frame(*frame_state()->frame());

    Label slow;
    // Check for smi operand.
    __ tst(value_, Operand(kSmiTagMask));
    __ b(ne, &slow);
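    // A smi operand can only get here because the optimistic add/sub in
    // the fast path overflowed, so undo that operation before going slow.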

    // Revert optimistic increment/decrement.
    if (is_increment_) {
      __ sub(value_, value_, Operand(Smi::FromInt(1)));
    } else {
      __ add(value_, value_, Operand(Smi::FromInt(1)));
    }

    // Slow case: Convert to number. At this point the
    // value to be incremented is in the value register.
    __ bind(&slow);

    // Convert the operand to a number.
    copied_frame.EmitPush(value_);

    copied_frame.InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);

    if (is_postfix_) {
      // Postfix: store to result (on the stack).
      __ str(r0, MemOperand(sp, target_size_ * kPointerSize));
    }

    copied_frame.EmitPush(r0);
    copied_frame.EmitPush(Operand(Smi::FromInt(1)));

    if (is_increment_) {
      copied_frame.CallRuntime(Runtime::kNumberAdd, 2);
    } else {
      copied_frame.CallRuntime(Runtime::kNumberSub, 2);
    }

    __ Move(value_, r0);

    copied_frame.MergeTo(frame_state()->frame());
  }

 private:
  Register value_;
  bool is_increment_;
  bool is_postfix_;
  int target_size_;
};


void CodeGenerator::VisitCountOperation(CountOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CountOperation");
  VirtualFrame::RegisterAllocationScope scope(this);

  bool is_postfix = node->is_postfix();
  bool is_increment = node->op() == Token::INC;

  Variable* var = node->expression()->AsVariableProxy()->AsVariable();
  bool is_const = (var != NULL && var->mode() == Variable::CONST);
  bool is_slot = (var != NULL && var->mode() == Variable::VAR);

  if (!is_const && is_slot && type_info(var->AsSlot()).IsSmi()) {
    // The type info declares that this variable is always a Smi. That
    // means it is a Smi both before and after the increment/decrement.
    // Let's make use of that to make a very minimal count.
    Reference target(this, node->expression(), !is_const);
    ASSERT(!target.is_illegal());
    target.GetValue();  // Pushes the value.
    Register value = frame_->PopToRegister();
    if (is_postfix) frame_->EmitPush(value);
    if (is_increment) {
      __ add(value, value, Operand(Smi::FromInt(1)));
    } else {
      __ sub(value, value, Operand(Smi::FromInt(1)));
    }
    frame_->EmitPush(value);
    target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
    if (is_postfix) frame_->Pop();
    ASSERT_EQ(original_height + 1, frame_->height());
    return;
  }

  // If it's a postfix expression and its result is not ignored and the
  // reference is non-trivial, then push a placeholder on the stack now
  // to hold the result of the expression.
  bool placeholder_pushed = false;
  if (!is_slot && is_postfix) {
    frame_->EmitPush(Operand(Smi::FromInt(0)));
    placeholder_pushed = true;
  }

  // A constant reference is not saved to, so a constant reference is not a
  // compound assignment reference.
  { Reference target(this, node->expression(), !is_const);
    if (target.is_illegal()) {
      // Spoof the virtual frame to have the expected height (one higher
      // than on entry).
      if (!placeholder_pushed) frame_->EmitPush(Operand(Smi::FromInt(0)));
      ASSERT_EQ(original_height + 1, frame_->height());
      return;
    }

    // This pushes 0, 1 or 2 words on the stack to be used later when updating
    // the target. It also pushes the current value of the target.
    target.GetValue();

    bool value_is_known_smi = frame_->KnownSmiAt(0);
    Register value = frame_->PopToRegister();

    // Postfix: Store the old value as the result.
    if (placeholder_pushed) {
      frame_->SetElementAt(value, target.size());
    } else if (is_postfix) {
      frame_->EmitPush(value);
      __ mov(VirtualFrame::scratch0(), value);
      value = VirtualFrame::scratch0();
    }

    // We can't use any type information here since the virtual frame from the
    // deferred code may have lost information and we can't merge a virtual
    // frame with less specific type knowledge to a virtual frame with more
    // specific knowledge that has already used that specific knowledge to
    // generate code.
    frame_->ForgetTypeInfo();

    // The constructor here will capture the current virtual frame and use it to
    // merge to after the deferred code has run. No virtual frame changes are
    // allowed from here until the 'BindExit' below.
    DeferredCode* deferred =
        new DeferredCountOperation(value,
                                   is_increment,
                                   is_postfix,
                                   target.size());
    if (!value_is_known_smi) {
      // Check for smi operand.
      __ tst(value, Operand(kSmiTagMask));

      deferred->Branch(ne);
    }

    // Perform optimistic increment/decrement.
    if (is_increment) {
      __ add(value, value, Operand(Smi::FromInt(1)), SetCC);
    } else {
      __ sub(value, value, Operand(Smi::FromInt(1)), SetCC);
    }

    // If increment/decrement overflows, go to deferred code.
    deferred->Branch(vs);
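    // The deferred code reverts the optimistic operation and redoes it via
    // the runtime before merging back here.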
Steve Blocka7e24c12009-10-30 11:49:00 +00006142
Shimeng (Simon) Wang8a31eba2010-12-06 19:01:33 -08006143 deferred->BindExit();
Steve Blocka7e24c12009-10-30 11:49:00 +00006144
Steve Blocka7e24c12009-10-30 11:49:00 +00006145 // Store the new value in the target if not const.
Kristian Monsen25f61362010-05-21 11:50:48 +01006146 // At this point the answer is in the value register.
Kristian Monsen25f61362010-05-21 11:50:48 +01006147 frame_->EmitPush(value);
6148 // Set the target with the result, leaving the result on
6149 // top of the stack. Removes the target from the stack if
6150 // it has a non-zero size.
Steve Block8defd9f2010-07-08 12:39:36 +01006151 if (!is_const) target.SetValue(NOT_CONST_INIT, LIKELY_SMI);
Steve Blocka7e24c12009-10-30 11:49:00 +00006152 }
6153
6154 // Postfix: Discard the new value and use the old.
Kristian Monsen25f61362010-05-21 11:50:48 +01006155 if (is_postfix) frame_->Pop();
Steve Block6ded16b2010-05-10 14:33:55 +01006156 ASSERT_EQ(original_height + 1, frame_->height());
Steve Blocka7e24c12009-10-30 11:49:00 +00006157}
6158
6159
Steve Block6ded16b2010-05-10 14:33:55 +01006160void CodeGenerator::GenerateLogicalBooleanOperation(BinaryOperation* node) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006161 // According to ECMA-262 section 11.11, page 58, the binary logical
6162 // operators must yield the result of one of the two expressions
6163 // before any ToBoolean() conversions. This means that the value
6164 // produced by a && or || operator is not necessarily a boolean.
6165
6166 // NOTE: If the left hand side produces a materialized value (not in
6167 // the CC register), we force the right hand side to do the
6168 // same. This is necessary because we may have to branch to the exit
6169 // after evaluating the left hand side (due to the shortcut
6170 // semantics), but the compiler must (statically) know if the result
6171 // of compiling the binary operation is materialized or not.
Steve Block6ded16b2010-05-10 14:33:55 +01006172 if (node->op() == Token::AND) {
Steve Blocka7e24c12009-10-30 11:49:00 +00006173 JumpTarget is_true;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006174 LoadCondition(node->left(), &is_true, false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006175 if (has_valid_frame() && !has_cc()) {
6176 // The left-hand side result is on top of the virtual frame.
6177 JumpTarget pop_and_continue;
6178 JumpTarget exit;
6179
Leon Clarkef7060e22010-06-03 12:02:55 +01006180 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006181 // Avoid popping the result if it converts to 'false' using the
6182 // standard ToBoolean() conversion as described in ECMA-262,
6183 // section 9.2, page 30.
6184 ToBoolean(&pop_and_continue, &exit);
6185 Branch(false, &exit);
6186
6187 // Pop the result of evaluating the first part.
6188 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006189 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006190
6191 // Evaluate right side expression.
6192 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006193 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006194
6195 // Exit (always with a materialized value).
6196 exit.Bind();
6197 } else if (has_cc() || is_true.is_linked()) {
6198 // The left-hand side is either (a) partially compiled to
6199 // control flow with a final branch left to emit or (b) fully
6200 // compiled to control flow and possibly true.
6201 if (has_cc()) {
6202 Branch(false, false_target());
6203 }
6204 is_true.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006205 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006206 } else {
6207 // Nothing to do.
6208 ASSERT(!has_valid_frame() && !has_cc() && !is_true.is_linked());
6209 }
6210
Steve Block6ded16b2010-05-10 14:33:55 +01006211 } else {
6212 ASSERT(node->op() == Token::OR);
Steve Blocka7e24c12009-10-30 11:49:00 +00006213 JumpTarget is_false;
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006214 LoadCondition(node->left(), true_target(), &is_false, false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006215 if (has_valid_frame() && !has_cc()) {
6216 // The left-hand side result is on top of the virtual frame.
6217 JumpTarget pop_and_continue;
6218 JumpTarget exit;
6219
Leon Clarkef7060e22010-06-03 12:02:55 +01006220 frame_->Dup();
Steve Blocka7e24c12009-10-30 11:49:00 +00006221 // Avoid popping the result if it converts to 'true' using the
6222 // standard ToBoolean() conversion as described in ECMA-262,
6223 // section 9.2, page 30.
6224 ToBoolean(&exit, &pop_and_continue);
6225 Branch(true, &exit);
6226
6227 // Pop the result of evaluating the first part.
6228 pop_and_continue.Bind();
Leon Clarkef7060e22010-06-03 12:02:55 +01006229 frame_->Pop();
Steve Blocka7e24c12009-10-30 11:49:00 +00006230
6231 // Evaluate right side expression.
6232 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006233 Load(node->right());
Steve Blocka7e24c12009-10-30 11:49:00 +00006234
6235 // Exit (always with a materialized value).
6236 exit.Bind();
6237 } else if (has_cc() || is_false.is_linked()) {
6238 // The left-hand side is either (a) partially compiled to
6239 // control flow with a final branch left to emit or (b) fully
6240 // compiled to control flow and possibly false.
6241 if (has_cc()) {
6242 Branch(true, true_target());
6243 }
6244 is_false.Bind();
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +01006245 LoadCondition(node->right(), true_target(), false_target(), false);
Steve Blocka7e24c12009-10-30 11:49:00 +00006246 } else {
6247 // Nothing to do.
6248 ASSERT(!has_valid_frame() && !has_cc() && !is_false.is_linked());
6249 }
Steve Block6ded16b2010-05-10 14:33:55 +01006250 }
6251}
Steve Blocka7e24c12009-10-30 11:49:00 +00006252
Steve Block6ded16b2010-05-10 14:33:55 +01006253
6254void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
6255#ifdef DEBUG
6256 int original_height = frame_->height();
6257#endif
6258 Comment cmnt(masm_, "[ BinaryOperation");
6259
6260 if (node->op() == Token::AND || node->op() == Token::OR) {
Steve Block6ded16b2010-05-10 14:33:55 +01006261 GenerateLogicalBooleanOperation(node);
Steve Blocka7e24c12009-10-30 11:49:00 +00006262 } else {
6263 // Optimize for the case where (at least) one of the expressions
6264 // is a literal small integer.
6265 Literal* lliteral = node->left()->AsLiteral();
6266 Literal* rliteral = node->right()->AsLiteral();
6267 // NOTE: The code below assumes that the slow cases (calls to runtime)
6268 // never return a constant/immutable object.
Kristian Monsen80d68ea2010-09-08 11:05:35 +01006269 bool overwrite_left = node->left()->ResultOverwriteAllowed();
6270 bool overwrite_right = node->right()->ResultOverwriteAllowed();

    if (rliteral != NULL && rliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->left());
      if (frame_->KnownSmiAt(0)) overwrite_left = false;
      SmiOperation(node->op(),
                   rliteral->handle(),
                   false,
                   overwrite_left ? OVERWRITE_LEFT : NO_OVERWRITE);
    } else if (lliteral != NULL && lliteral->handle()->IsSmi()) {
      VirtualFrame::RegisterAllocationScope scope(this);
      Load(node->right());
      if (frame_->KnownSmiAt(0)) overwrite_right = false;
      SmiOperation(node->op(),
                   lliteral->handle(),
                   true,
                   overwrite_right ? OVERWRITE_RIGHT : NO_OVERWRITE);
    } else {
      GenerateInlineSmi inline_smi =
          loop_nesting() > 0 ? GENERATE_INLINE_SMI : DONT_GENERATE_INLINE_SMI;
      if (lliteral != NULL) {
        ASSERT(!lliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      if (rliteral != NULL) {
        ASSERT(!rliteral->handle()->IsSmi());
        inline_smi = DONT_GENERATE_INLINE_SMI;
      }
      VirtualFrame::RegisterAllocationScope scope(this);
      OverwriteMode overwrite_mode = NO_OVERWRITE;
      if (overwrite_left) {
        overwrite_mode = OVERWRITE_LEFT;
      } else if (overwrite_right) {
        overwrite_mode = OVERWRITE_RIGHT;
      }
      Load(node->left());
      Load(node->right());
      GenericBinaryOperation(node->op(), overwrite_mode, inline_smi);
    }
  }
  ASSERT(!has_valid_frame() ||
         (has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::VisitThisFunction(ThisFunction* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  frame_->EmitPush(MemOperand(frame_->Function()));
  ASSERT_EQ(original_height + 1, frame_->height());
}


void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CompareOperation");

  VirtualFrame::RegisterAllocationScope nonspilled_scope(this);

  // Get the expressions from the node.
  Expression* left = node->left();
  Expression* right = node->right();
  Token::Value op = node->op();

  // To make typeof testing for natives implemented in JavaScript really
  // efficient, we generate special code for expressions of the form:
  // 'typeof <expression> == <string>'.
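  // Such a test compiles down to flag-setting checks on the operand's tag
  // bits or map, so no boolean value is ever materialized on the frame.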
  UnaryOperation* operation = left->AsUnaryOperation();
  if ((op == Token::EQ || op == Token::EQ_STRICT) &&
      (operation != NULL && operation->op() == Token::TYPEOF) &&
      (right->AsLiteral() != NULL &&
       right->AsLiteral()->handle()->IsString())) {
    Handle<String> check(String::cast(*right->AsLiteral()->handle()));

    // Load the operand and move it to a register.
    LoadTypeofExpression(operation->expression());
    Register tos = frame_->PopToRegister();

    Register scratch = VirtualFrame::scratch0();

    if (check->Equals(Heap::number_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
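      // (Smis have a zero low-order tag bit, so this test sets the Z flag
      // exactly for smis, and every smi is a number.)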
      true_target()->Branch(eq);
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::string_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));

      // It can be an undetectable string object.
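      // (Undetectable objects, such as document.all in browsers that
      // emulate it, must answer 'undefined' to typeof and are therefore
      // excluded here.)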
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(scratch, FieldMemOperand(tos, Map::kInstanceTypeOffset));
      __ cmp(scratch, Operand(FIRST_NONSTRING_TYPE));
      cc_reg_ = lt;

    } else if (check->Equals(Heap::boolean_symbol())) {
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);
      __ LoadRoot(ip, Heap::kFalseValueRootIndex);
      __ cmp(tos, ip);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::undefined_symbol())) {
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
      __ ldrb(scratch, FieldMemOperand(tos, Map::kBitFieldOffset));
      __ and_(scratch, scratch, Operand(1 << Map::kIsUndetectable));
      __ cmp(scratch, Operand(1 << Map::kIsUndetectable));

      cc_reg_ = eq;

    } else if (check->Equals(Heap::function_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);
      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_FUNCTION_TYPE);
      true_target()->Branch(eq);
      // Regular expressions are callable so typeof == 'function'.
      __ CompareInstanceType(map_reg, tos, JS_REGEXP_TYPE);
      cc_reg_ = eq;

    } else if (check->Equals(Heap::object_symbol())) {
      __ tst(tos, Operand(kSmiTagMask));
      false_target()->Branch(eq);

      __ LoadRoot(ip, Heap::kNullValueRootIndex);
      __ cmp(tos, ip);
      true_target()->Branch(eq);

      Register map_reg = scratch;
      __ CompareObjectType(tos, map_reg, tos, JS_REGEXP_TYPE);
      false_target()->Branch(eq);

      // It can be an undetectable object.
      __ ldrb(tos, FieldMemOperand(map_reg, Map::kBitFieldOffset));
      __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
      __ cmp(tos, Operand(1 << Map::kIsUndetectable));
      false_target()->Branch(eq);

      __ ldrb(tos, FieldMemOperand(map_reg, Map::kInstanceTypeOffset));
      __ cmp(tos, Operand(FIRST_JS_OBJECT_TYPE));
      false_target()->Branch(lt);
      __ cmp(tos, Operand(LAST_JS_OBJECT_TYPE));
      cc_reg_ = le;

    } else {
      // Uncommon case: typeof testing against a string literal that is
      // never returned from the typeof operator.
      false_target()->Jump();
    }
    ASSERT(!has_valid_frame() ||
           (has_cc() && frame_->height() == original_height));
    return;
  }

  switch (op) {
    case Token::EQ:
      Comparison(eq, left, right, false);
      break;

    case Token::LT:
      Comparison(lt, left, right);
      break;

    case Token::GT:
      Comparison(gt, left, right);
      break;

    case Token::LTE:
      Comparison(le, left, right);
      break;

    case Token::GTE:
      Comparison(ge, left, right);
      break;

    case Token::EQ_STRICT:
      Comparison(eq, left, right, true);
      break;

    case Token::IN: {
      Load(left);
      Load(right);
      frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
      frame_->EmitPush(r0);
      break;
    }

    case Token::INSTANCEOF: {
      Load(left);
      Load(right);
      InstanceofStub stub(InstanceofStub::kNoFlags);
      frame_->CallStub(&stub, 2);
      // At this point if instanceof succeeded then r0 == 0.
      __ tst(r0, Operand(r0));
      cc_reg_ = eq;
      break;
    }

    default:
      UNREACHABLE();
  }
  ASSERT((has_cc() && frame_->height() == original_height) ||
         (!has_cc() && frame_->height() == original_height + 1));
}


void CodeGenerator::VisitCompareToNull(CompareToNull* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ CompareToNull");

  Load(node->expression());
  Register tos = frame_->PopToRegister();
  __ LoadRoot(ip, Heap::kNullValueRootIndex);
  __ cmp(tos, ip);

  // The 'null' value is only equal to 'undefined' if using non-strict
  // comparisons.
  if (!node->is_strict()) {
    true_target()->Branch(eq);
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(tos, Operand(ip));
    true_target()->Branch(eq);

    __ tst(tos, Operand(kSmiTagMask));
    false_target()->Branch(eq);

    // It can be an undetectable object.
    __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
    __ ldrb(tos, FieldMemOperand(tos, Map::kBitFieldOffset));
    __ and_(tos, tos, Operand(1 << Map::kIsUndetectable));
    __ cmp(tos, Operand(1 << Map::kIsUndetectable));
  }
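  // (E.g. 'x == null' is also true for undefined and for undetectable
  // objects, while the strict form 'x === null' matches only null itself,
  // which is why the extra checks are confined to the non-strict case.)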

  cc_reg_ = eq;
  ASSERT(has_cc() && frame_->height() == original_height);
}


class DeferredReferenceGetNamedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetNamedValue(Register receiver,
                                          Handle<String> name,
                                          bool is_contextual)
      : receiver_(receiver),
        name_(name),
        is_contextual_(is_contextual),
        is_dont_delete_(false) {
    set_comment(is_contextual
                ? "[ DeferredReferenceGetNamedValue (contextual)"
                : "[ DeferredReferenceGetNamedValue");
  }

  virtual void Generate();

  void set_is_dont_delete(bool value) {
    ASSERT(is_contextual_);
    is_dont_delete_ = value;
  }

 private:
  Register receiver_;
  Handle<String> name_;
  bool is_contextual_;
  bool is_dont_delete_;
};

// The convention is that on entry the receiver is in a register that is
// not in use by the virtual frame. On exit the result is in that same
// register and the frame has the same height.
void DeferredReferenceGetNamedValue::Generate() {
#ifdef DEBUG
  int expected_height = frame_state()->frame()->height();
#endif
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
  __ DecrementCounter(&Counters::named_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::named_load_inline_miss, 1, scratch1, scratch2);

  // Ensure receiver in r0 and name in r2 to match load ic calling convention.
  __ Move(r0, receiver_);
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    RelocInfo::Mode mode = is_contextual_
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    __ Call(ic, mode);
    // We must mark the code just after the call with the correct marker.
    MacroAssembler::NopMarkerTypes code_marker;
    if (is_contextual_) {
      code_marker = is_dont_delete_
          ? MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT_DONT_DELETE
          : MacroAssembler::PROPERTY_ACCESS_INLINED_CONTEXT;
    } else {
      code_marker = MacroAssembler::PROPERTY_ACCESS_INLINED;
    }
    __ MarkCode(code_marker);

    // At this point the answer is in r0. We move it to the expected register
    // if necessary.
    __ Move(receiver_, r0);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver register since that register was not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
  ASSERT_EQ(expected_height, frame_state()->frame()->height());
}


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceGetKeyedValue(Register key, Register receiver)
      : key_(key), receiver_(receiver) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

 private:
  Register key_;
  Register receiver_;
};

// Takes the key and the receiver in r0 and r1, in either order. Returns
// the result in r0.
void DeferredReferenceGetKeyedValue::Generate() {
  ASSERT((key_.is(r0) && receiver_.is(r1)) ||
         (key_.is(r1) && receiver_.is(r0)));

  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
  __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);

  // Ensure key in r0 and receiver in r1 to match keyed load ic calling
  // convention.
  if (key_.is(r1)) {
    __ Swap(r0, r1, ip);
  }

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed load has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Now go back to the frame that we entered with. This will not overwrite
    // the receiver or key registers since they were not in use when we came
    // in. The instructions emitted by this merge are skipped over by the
    // inline load patching mechanism when looking for the branch instruction
    // that tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
                                 StrictModeFlag strict_mode)
      : value_(value),
        key_(key),
        receiver_(receiver),
        strict_mode_(strict_mode) {
    set_comment("[ DeferredReferenceSetKeyedValue");
  }

  virtual void Generate();

 private:
  Register value_;
  Register key_;
  Register receiver_;
  StrictModeFlag strict_mode_;
};


void DeferredReferenceSetKeyedValue::Generate() {
  Register scratch1 = VirtualFrame::scratch0();
  Register scratch2 = VirtualFrame::scratch1();
  __ DecrementCounter(&Counters::keyed_store_inline, 1, scratch1, scratch2);
  __ IncrementCounter(
      &Counters::keyed_store_inline_miss, 1, scratch1, scratch2);

  // Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
  // calling convention.
  if (value_.is(r1)) {
    __ Swap(r0, r1, ip);
  }
  ASSERT(receiver_.is(r2));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call keyed store IC. It has the arguments value, key and receiver in r0,
    // r1 and r2.
    Handle<Code> ic(Builtins::builtin(
        (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                      : Builtins::KeyedStoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // keyed store has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


class DeferredReferenceSetNamedValue: public DeferredCode {
 public:
  DeferredReferenceSetNamedValue(Register value,
                                 Register receiver,
                                 Handle<String> name,
                                 StrictModeFlag strict_mode)
      : value_(value),
        receiver_(receiver),
        name_(name),
        strict_mode_(strict_mode) {
    set_comment("[ DeferredReferenceSetNamedValue");
  }

  virtual void Generate();

 private:
  Register value_;
  Register receiver_;
  Handle<String> name_;
  StrictModeFlag strict_mode_;
};


// Takes value in r0, receiver in r1 and returns the result (the
// value) in r0.
void DeferredReferenceSetNamedValue::Generate() {
  // Record the entry frame and spill.
  VirtualFrame copied_frame(*frame_state()->frame());
  copied_frame.SpillAll();

  // Ensure value in r0, receiver in r1 to match store ic calling
  // convention.
  ASSERT(value_.is(r0) && receiver_.is(r1));
  __ mov(r2, Operand(name_));

  // The rest of the instructions in the deferred code must be together.
  { Assembler::BlockConstPoolScope block_const_pool(masm_);
    // Call the store IC. It takes the value, the receiver and the property
    // name in r0, r1 and r2, respectively.
    Handle<Code> ic(Builtins::builtin(
        (strict_mode_ == kStrictMode) ? Builtins::StoreIC_Initialize_Strict
                                      : Builtins::StoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);
    // The call must be followed by a nop instruction to indicate that the
    // named store has been inlined.
    __ MarkCode(MacroAssembler::PROPERTY_ACCESS_INLINED);

    // Go back to the frame we entered with. The instructions
    // generated by this merge are skipped over by the inline store
    // patching mechanism when looking for the branch instruction that
    // tells it where the code to patch is.
    copied_frame.MergeTo(frame_state()->frame());

    // Block the constant pool for one more instruction after leaving this
    // constant pool block scope to include the branch instruction ending the
    // deferred code.
    __ BlockConstPoolFor(1);
  }
}


// Consumes the top of stack (the receiver) and pushes the result instead.
void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
  bool contextual_load_in_builtin =
      is_contextual &&
      (Bootstrapper::IsActive() ||
       (!info_->closure().is_null() && info_->closure()->IsBuiltin()));

  if (scope()->is_global_scope() ||
      loop_nesting() == 0 ||
      contextual_load_in_builtin) {
    Comment cmnt(masm(), "[ Load from named Property");
    // Set up the name register and call the load IC.
    frame_->CallLoadIC(name,
                       is_contextual
                       ? RelocInfo::CODE_TARGET_CONTEXT
                       : RelocInfo::CODE_TARGET);
    frame_->EmitPush(r0);  // Push answer.
  } else {
    // Inline the in-object property case.
    Comment cmnt(masm(), is_contextual
                         ? "[ Inlined contextual property load"
                         : "[ Inlined named property load");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    if (is_contextual) {
      __ IncrementCounter(&Counters::named_load_global_inline, 1,
                          frame_->scratch0(), frame_->scratch1());
    } else {
      __ IncrementCounter(&Counters::named_load_inline, 1,
                          frame_->scratch0(), frame_->scratch1());
    }

    // The following instructions are the inlined load of an in-object property.
    // Parts of this code are patched, so the exact instruction sequence
    // generated must stay fixed. Therefore the constant pool is blocked
    // while generating this code.

    // Load the receiver from the stack.
    Register receiver = frame_->PopToRegister();

    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(receiver, name, is_contextual);

    bool is_dont_delete = false;
    if (is_contextual) {
      if (!info_->closure().is_null()) {
        // When doing lazy compilation we can check if the global cell
        // already exists and use its "don't delete" status as a hint.
        AssertNoAllocation no_gc;
        v8::internal::GlobalObject* global_object =
            info_->closure()->context()->global();
        LookupResult lookup;
        global_object->LocalLookupRealNamedProperty(*name, &lookup);
        if (lookup.IsProperty() && lookup.type() == NORMAL) {
          ASSERT(lookup.holder() == global_object);
          ASSERT(global_object->property_dictionary()->ValueAt(
              lookup.GetDictionaryEntry())->IsJSGlobalPropertyCell());
          is_dont_delete = lookup.IsDontDelete();
        }
      }
      if (is_dont_delete) {
        __ IncrementCounter(&Counters::dont_delete_hint_hit, 1,
                            frame_->scratch0(), frame_->scratch1());
      }
    }

    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      if (!is_contextual) {
        // Check that the receiver is a heap object.
        __ tst(receiver, Operand(kSmiTagMask));
        deferred->Branch(eq);
      }

      // Check for the_hole_value if necessary.
      // Below we rely on the number of instructions generated, and we can't
      // cope with the Check macro which does not generate a fixed number of
      // instructions.
      Label skip, check_the_hole, cont;
      if (FLAG_debug_code && is_contextual && is_dont_delete) {
        __ b(&skip);
        __ bind(&check_the_hole);
        __ Check(ne, "DontDelete cells can't contain the hole");
        __ b(&cont);
        __ bind(&skip);
      }

#ifdef DEBUG
      int InlinedNamedLoadInstructions = 5;
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif

      Register scratch = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();

      // Check the map. The null map used below is patched by the inline cache
      // code. Therefore we can't use a LoadRoot call.
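      // (The inline cache machinery later rewrites the null constant to the
      // receiver's actual map and the zero offset below to the real
      // in-object offset, so a hit costs only a map check and one load.)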
      __ ldr(scratch, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch, scratch2);
      deferred->Branch(ne);

      if (is_contextual) {
#ifdef DEBUG
        InlinedNamedLoadInstructions += 1;
#endif
        // Load the (initially invalid) cell and get its value.
        masm()->mov(receiver, Operand(Factory::null_value()));
        __ ldr(receiver,
               FieldMemOperand(receiver, JSGlobalPropertyCell::kValueOffset));

        deferred->set_is_dont_delete(is_dont_delete);

        if (!is_dont_delete) {
#ifdef DEBUG
          InlinedNamedLoadInstructions += 3;
#endif
          __ cmp(receiver, Operand(Factory::the_hole_value()));
          deferred->Branch(eq);
        } else if (FLAG_debug_code) {
#ifdef DEBUG
          InlinedNamedLoadInstructions += 3;
#endif
          __ cmp(receiver, Operand(Factory::the_hole_value()));
          __ b(&check_the_hole, eq);
          __ bind(&cont);
        }
      } else {
        // Initially use an invalid index. The index will be patched by the
        // inline cache code.
        __ ldr(receiver, MemOperand(receiver, 0));
      }

      // Make sure that the expected number of instructions are generated.
      // If the code before is updated, the offsets in ic-arm.cc
      // LoadIC::PatchInlinedContextualLoad and PatchInlinedLoad need
      // to be updated.
      ASSERT_EQ(InlinedNamedLoadInstructions,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
    // At this point the receiver register has the result, either from the
    // deferred code or from the inlined code.
    frame_->EmitPush(receiver);
  }
}


void CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
#endif

  Result result;
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    frame()->CallStoreIC(name, is_contextual, strict_mode_flag());
  } else {
    // Inline the in-object property case.
    JumpTarget slow, done;

    // Get the value and receiver from the stack.
    frame()->PopToR0();
    Register value = r0;
    frame()->PopToR1();
    Register receiver = r1;

    DeferredReferenceSetNamedValue* deferred =
        new DeferredReferenceSetNamedValue(
            value, receiver, name, strict_mode_flag());

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are the part of the inlined
    // in-object property store code which can be patched. Therefore
    // the exact number of instructions generated must be fixed, so
    // the constant pool is blocked while generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch0 = VirtualFrame::scratch0();
      Register scratch1 = VirtualFrame::scratch1();

      // Check the map. Initially use an invalid map to force a
      // failure. The map check will be patched in the runtime system.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch0, Operand(Factory::null_value()));
      __ cmp(scratch0, scratch1);
      deferred->Branch(ne);

      int offset = 0;
      __ str(value, MemOperand(receiver, offset));

      // Update the write barrier and record its size. We do not use
      // the RecordWrite macro here because we want the offset
      // addition instruction first to make it easy to patch.
      Label record_write_start, record_write_done;
      __ bind(&record_write_start);
      // Add offset into the object.
      __ add(scratch0, receiver, Operand(offset));
      // Test that the object is not in the new space. We cannot set
      // region marks for new space pages.
      __ InNewSpace(receiver, scratch1, eq, &record_write_done);
      // Record the actual write.
      __ RecordWriteHelper(receiver, scratch0, scratch1);
      __ bind(&record_write_done);
      // Clobber all input registers when running with the debug-code flag
      // turned on to provoke errors.
      if (FLAG_debug_code) {
        __ mov(receiver, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
        __ mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
      }
      // Check that this is the first inlined write barrier or that
      // this inlined write barrier has the same size as all the other
      // inlined write barriers.
      ASSERT((inlined_write_barrier_size_ == -1) ||
             (inlined_write_barrier_size_ ==
              masm()->InstructionsGeneratedSince(&record_write_start)));
      inlined_write_barrier_size_ =
          masm()->InstructionsGeneratedSince(&record_write_start);

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedNamedStoreInstructionsAfterPatch(),
                masm()->InstructionsGeneratedSince(&check_inlined_codesize));
    }
    deferred->BindExit();
  }
  ASSERT_EQ(expected_height, frame()->height());
}


void CodeGenerator::EmitKeyedLoad() {
  if (loop_nesting() == 0) {
    Comment cmnt(masm_, "[ Load from keyed property");
    frame_->CallKeyedLoadIC();
  } else {
    // Inline the keyed load.
    Comment cmnt(masm_, "[ Inlined load from keyed property");

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::keyed_load_inline, 1,
                        frame_->scratch0(), frame_->scratch1());

    // Load the key and receiver from the stack.
    bool key_is_known_smi = frame_->KnownSmiAt(0);
    Register key = frame_->PopToRegister();
    Register receiver = frame_->PopToRegister(key);

    // The deferred code expects key and receiver in registers.
    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(key, receiver);

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // The following instructions are the part of the inlined keyed-load
    // code which can be patched. Therefore the exact number of instructions
    // generated needs to be fixed, so the constant pool is blocked while
    // generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
      Register scratch1 = VirtualFrame::scratch0();
      Register scratch2 = VirtualFrame::scratch1();
      // Check the map. The null map used below is patched by the inline cache
      // code.
      __ ldr(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));

      // Check that the key is a smi.
      if (!key_is_known_smi) {
        __ tst(key, Operand(kSmiTagMask));
        deferred->Branch(ne);
      }

#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif
      __ mov(scratch2, Operand(Factory::null_value()));
      __ cmp(scratch1, scratch2);
      deferred->Branch(ne);

      // Get the elements array from the receiver.
      __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
      __ AssertFastElements(scratch1);

      // Check that key is within bounds. Use unsigned comparison to handle
      // negative keys.
      __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
      __ cmp(scratch2, key);
      deferred->Branch(ls);  // Unsigned less equal.
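      // (Both the length and the key are smis, i.e. the integer value
      // shifted left by one, so comparing the tagged words directly
      // preserves the ordering; a negative key looks like a huge unsigned
      // value and fails the check.)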

      // Load and check that the result is not the hole (key is a smi).
      __ LoadRoot(scratch2, Heap::kTheHoleValueRootIndex);
      __ add(scratch1,
             scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ ldr(scratch1,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));
      __ cmp(scratch1, scratch2);
      deferred->Branch(eq);

      __ mov(r0, scratch1);
      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    deferred->BindExit();
  }
}


void CodeGenerator::EmitKeyedStore(StaticType* key_type,
                                   WriteBarrierCharacter wb_info) {
  // Generate inlined version of the keyed store if the code is in a loop
  // and the key is likely to be a smi.
  if (loop_nesting() > 0 && key_type->IsLikelySmi()) {
    // Inline the keyed store.
    Comment cmnt(masm_, "[ Inlined store to keyed property");

    Register scratch1 = VirtualFrame::scratch0();
    Register scratch2 = VirtualFrame::scratch1();
    Register scratch3 = r3;

    // Counter will be decremented in the deferred code. Placed here to avoid
    // having it in the instruction stream below where patching will occur.
    __ IncrementCounter(&Counters::keyed_store_inline, 1,
                        scratch1, scratch2);

    // Load the value, key and receiver from the stack.
    bool value_is_harmless = frame_->KnownSmiAt(0);
    if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
    bool key_is_smi = frame_->KnownSmiAt(1);
    Register value = frame_->PopToRegister();
    Register key = frame_->PopToRegister(value);
    VirtualFrame::SpilledScope spilled(frame_);
    Register receiver = r2;
    frame_->EmitPop(receiver);

#ifdef DEBUG
    bool we_remembered_the_write_barrier = value_is_harmless;
#endif

    // The deferred code expects value, key and receiver in registers.
    DeferredReferenceSetKeyedValue* deferred =
        new DeferredReferenceSetKeyedValue(
            value, key, receiver, strict_mode_flag());

    // Check that the value is a smi. As this inlined code does not set the
    // write barrier it is only possible to store smi values.
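    // (Smis are tagged immediates, not heap pointers, so storing one never
    // creates a pointer that the write barrier would have to record; a
    // non-smi value is only allowed if the elements array itself turns out
    // to live in new space, which is checked further down.)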
    if (!value_is_harmless) {
      // If the value is not likely to be a Smi then let's test the fixed array
      // for new space instead. See below.
      if (wb_info == LIKELY_SMI) {
        __ tst(value, Operand(kSmiTagMask));
        deferred->Branch(ne);
#ifdef DEBUG
        we_remembered_the_write_barrier = true;
#endif
      }
    }

    if (!key_is_smi) {
      // Check that the key is a smi.
      __ tst(key, Operand(kSmiTagMask));
      deferred->Branch(ne);
    }

    // Check that the receiver is a heap object.
    __ tst(receiver, Operand(kSmiTagMask));
    deferred->Branch(eq);

    // Check that the receiver is a JSArray.
    __ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
    deferred->Branch(ne);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis. Use unsigned comparison to handle negative keys.
    __ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
    __ cmp(scratch1, key);
    deferred->Branch(ls);  // Unsigned less equal.

    // Get the elements array from the receiver.
    __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
    if (!value_is_harmless && wb_info != LIKELY_SMI) {
      Label ok;
      __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask()));
      __ cmp(scratch2, Operand(ExternalReference::new_space_start()));
      __ tst(value, Operand(kSmiTagMask), ne);
      deferred->Branch(ne);
#ifdef DEBUG
      we_remembered_the_write_barrier = true;
#endif
    }
    // Check that the elements array is not a dictionary.
    __ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
    // The following instructions are the part of the inlined keyed-store
    // code which can be patched. Therefore the exact number of instructions
    // generated needs to be fixed, so the constant pool is blocked while
    // generating this code.
    { Assembler::BlockConstPoolScope block_const_pool(masm_);
#ifdef DEBUG
      Label check_inlined_codesize;
      masm_->bind(&check_inlined_codesize);
#endif

      // Read the fixed array map from the constant pool (not from the root
      // array) so that the value can be patched. When debugging, we patch this
      // comparison to always fail so that we will hit the IC call in the
      // deferred code which will allow the debugger to break for fast case
      // stores.
      __ mov(scratch3, Operand(Factory::fixed_array_map()));
      __ cmp(scratch2, scratch3);
      deferred->Branch(ne);

      // Store the value.
      __ add(scratch1, scratch1,
             Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ str(value,
             MemOperand(scratch1, key, LSL,
                        kPointerSizeLog2 - (kSmiTagSize + kSmiShiftSize)));

      // Make sure that the expected number of instructions are generated.
      ASSERT_EQ(kInlinedKeyedStoreInstructionsAfterPatch,
                masm_->InstructionsGeneratedSince(&check_inlined_codesize));
    }

    ASSERT(we_remembered_the_write_barrier);

    deferred->BindExit();
  } else {
    frame()->CallKeyedStoreIC(strict_mode_flag());
  }
}


#ifdef DEBUG
bool CodeGenerator::HasValidEntryRegisters() { return true; }
#endif


#undef __
#define __ ACCESS_MASM(masm)

Handle<String> Reference::GetName() {
  ASSERT(type_ == NAMED);
  Property* property = expression_->AsProperty();
  if (property == NULL) {
    // Global variable reference treated as a named property reference.
    VariableProxy* proxy = expression_->AsVariableProxy();
    ASSERT(proxy->AsVariable() != NULL);
    ASSERT(proxy->AsVariable()->is_global());
    return proxy->name();
  } else {
    Literal* raw_name = property->key()->AsLiteral();
    ASSERT(raw_name != NULL);
    return Handle<String>(String::cast(*raw_name->handle()));
  }
}


void Reference::DupIfPersist() {
  if (persist_after_get_) {
    switch (type_) {
      case KEYED:
        cgen_->frame()->Dup2();
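        // (A keyed reference needs both the receiver and the key again
        // after the get; a named reference only needs the receiver.)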
        break;
      case NAMED:
        cgen_->frame()->Dup();
        // Fall through.
      case UNLOADED:
      case ILLEGAL:
      case SLOT:
        // Do nothing.
        ;
    }
  } else {
    set_unloaded();
  }
}


void Reference::GetValue() {
  ASSERT(cgen_->HasValidEntryRegisters());
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Load from Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      ASSERT(slot != NULL);
      DupIfPersist();
      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
      break;
    }

    case NAMED: {
      Variable* var = expression_->AsVariableProxy()->AsVariable();
      bool is_global = var != NULL;
      ASSERT(!is_global || var->is_global());
      Handle<String> name = GetName();
      DupIfPersist();
      cgen_->EmitNamedLoad(name, is_global);
      break;
    }

    case KEYED: {
      ASSERT(property != NULL);
      DupIfPersist();
      cgen_->EmitKeyedLoad();
      cgen_->frame()->EmitPush(r0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void Reference::SetValue(InitState init_state, WriteBarrierCharacter wb_info) {
  ASSERT(!is_illegal());
  ASSERT(!cgen_->has_cc());
  MacroAssembler* masm = cgen_->masm();
  VirtualFrame* frame = cgen_->frame();
  Property* property = expression_->AsProperty();
  if (property != NULL) {
    cgen_->CodeForSourcePosition(property->position());
  }

  switch (type_) {
    case SLOT: {
      Comment cmnt(masm, "[ Store to Slot");
      Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
      cgen_->StoreToSlot(slot, init_state);
      set_unloaded();
      break;
    }

    case NAMED: {
      Comment cmnt(masm, "[ Store to named Property");
      cgen_->EmitNamedStore(GetName(), false);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    case KEYED: {
      Comment cmnt(masm, "[ Store to keyed Property");
      Property* property = expression_->AsProperty();
      ASSERT(property != NULL);
      cgen_->CodeForSourcePosition(property->position());
      cgen_->EmitKeyedStore(property->key()->type(), wb_info);
      frame->EmitPush(r0);
      set_unloaded();
      break;
    }

    default:
      UNREACHABLE();
  }
}


const char* GenericBinaryOpStub::GetName() {
  if (name_ != NULL) return name_;
  const int len = 100;
  name_ = Bootstrapper::AllocateAutoDeletedArray(len);
  if (name_ == NULL) return "OOM";
  const char* op_name = Token::Name(op_);
  const char* overwrite_name;
  switch (mode_) {
    case NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
    case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
    default: overwrite_name = "UnknownOverwrite"; break;
  }

  OS::SNPrintF(Vector<char>(name_, len),
               "GenericBinaryOpStub_%s_%s%s_%s",
               op_name,
               overwrite_name,
               specialized_on_rhs_ ? "_ConstantRhs" : "",
               BinaryOpIC::GetName(runtime_operands_type_));
  return name_;
}
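// For example, an ADD stub in OVERWRITE_LEFT mode that is specialized on a
// constant right-hand side gets a name along the lines of
// "GenericBinaryOpStub_ADD_OverwriteLeft_ConstantRhs_<type>", where <type>
// is whatever BinaryOpIC::GetName() reports for the recorded operand types.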


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM